diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 9e32a797f..000000000 --- a/.travis.yml +++ /dev/null @@ -1,63 +0,0 @@ -language: cpp - -sudo: false - -git: - submodules: true - depth: 10 - -matrix: - include: - - os: linux - compiler: clang - - os: osx - compiler: clang - -env: - global: - - secure: "CqhZDPctJcpXGPpmIPK5usD/O+2HYawW3434oDufVS9uG/+C7aHzKzi8cuZ7n/REHqJMzy7gJfp6DiyF2QowpnN1L2W0FSJ9VOgj4JQF2Wsupo6gJkq6/CW2Fa35PhQHsv29bfyqtIq+R5SBVAieBe/Lh2P144RwRliGRopGQ68=" - - secure: "idk4fdU49i546Zs6Fxha14H05eRJ1G/D6NPRaie8M8o+xySnEqf+TyA9/HU8QH7cFvroSLuHJ1U7TmwnR+sXy4XBlIfHLi4u2MN+l/q014GG7T2E2xYcTauqjB4ldToRsDQwe5Dq0gZCMsHLPspWPjL9twfp+Ds7qgcFhTsct0s=" - -addons: - postgresql: "9.4" - apt: - sources: - - ubuntu-toolchain-r-test - - llvm-toolchain-precise-3.5 - packages: - - clang-3.5 - -before_install: - - export COMMIT_MESSAGE=$(git show -s --format=%B $TRAVIS_COMMIT | tr -d '\n') - - export MASON_BUILD=true - - if [[ $(uname -s) == 'Linux' ]]; then - psql -U postgres -c 'create database template_postgis;' -U postgres; - psql -U postgres -c 'create extension postgis;' -d template_postgis -U postgres; - export CXX="clang++-3.5"; - export CC="clang++-3.5"; - export PYTHONPATH=$(pwd)/mason_packages/.link/lib/python2.7/site-packages; - else - export PYTHONPATH=$(pwd)/mason_packages/.link/lib/python/site-packages; - fi; - - PYTHONUSERBASE=$(pwd)/mason_packages/.link pip install --user nose - - PYTHONUSERBASE=$(pwd)/mason_packages/.link pip install --user wheel - - PYTHONUSERBASE=$(pwd)/mason_packages/.link pip install --user twine - - python --version - -install: - - python setup.py install --prefix $(pwd)/mason_packages/.link - -before_script: - - python test/run_tests.py -q - -script: - - python test/visual.py -q - - if [[ ${COMMIT_MESSAGE} =~ "[publish]" ]]; then - python setup.py bdist_wheel; - if [[ $(uname -s) == 'Linux' ]]; then - export PRE_DISTS='dist/*.whl'; - rename 's/linux_x86_64/any/;' $PRE_DISTS; - fi; - export DISTS='dist/*'; - $(pwd)/mason_packages/.link/bin/twine upload -u $PYPI_USER -p $PYPI_PASSWORD $DISTS ; - fi; diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..7f8223983 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,34 @@ +# Contributing + +General guidelines for contributing to python-mapnik + +## Coding Conventions + +See the [Mapnik guide](https://github.com/mapnik/mapnik/blob/master/docs/contributing.md#coding-conventions). + +### Python Style Guide + +All python code should follow PEP8 as closely as possible. However, we do not strictly enforce all PEP8 such as 80 characters per line. + +## Testing + +In order for any code to be pulled into master it must contain tests for **100%** of all lines. The only lines that are not required to be tested are those that cover extreme cases which can not be tested with regularity, such as race conditions. + +If this case does occur you can put a comment block such as shown below to exclude the lines from test coverage. + +```C++ +// LCOV_EXCL_START +can_not_reach_code(); +// LCOV_EXCL_END +``` + +## Releasing + +To release a new python-mapnik version: + +Currently just hit up @flippmoke, this section will be filled out ASAP! + +### Documentation + +TODO: Write documentation on how to update documentation. 
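+
+For the line-coverage requirement described under Testing above, one possible (unofficial) way to inspect coverage locally is with lcov; the build directory and instrumentation flags below are assumptions and may differ for your setup:
+
+```sh
+# build the extension with coverage instrumentation first (e.g. CXXFLAGS="--coverage")
+lcov --capture --directory build --output-file coverage.info
+genhtml coverage.info --output-directory coverage-report
+```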
+ diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 000000000..53b088ef0 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,4 @@ +include src/*.hpp +exclude packaging/mapnik/bin/* +exclude packaging/mapnik/lib/libmapnik* +exclude packaging/mapnik/lib/mapnik/input/* diff --git a/README.md b/README.md index fa3ba46ff..1324e021f 100644 --- a/README.md +++ b/README.md @@ -1,77 +1,25 @@ +**New** Python bindings for Mapnik **[WIP]** -[![Build Status](https://travis-ci.org/mapnik/python-mapnik.svg)](https://travis-ci.org/mapnik/python-mapnik) - -Python bindings for Mapnik. +https://github.com/pybind/pybind11 ## Installation -Eventually we hope that many people will simply be able to `pip install mapnik` in order to get prebuilt binaries, -this currently does not work though. So for now here are the instructions - -### Create a virtual environment - -It is highly suggested that you [a python virtualenv](http://docs.python-guide.org/en/latest/dev/virtualenvs/) when developing -on mapnik. - -### Building from Mason - -If you do not have mapnik built from source and simply wish to develop from the latest version in [mapnik master branch](https://github.com/mapnik/mapnik) you can setup your environment with a mason build. In order to trigger a mason build prior to building you must set the `MASON_BUILD` environment variable. - -```bash -export MASON_BUILD=true -``` - -After this is done simply follow the directions as per a source build. - ### Building from Source -Assuming that you built your own mapnik from source, and you have run `make install`. Set any compiler or linking environment variables as necessary so that your installation of mapnik is found. Next simply run one of the two methods: - -``` -python setup.py develop -``` - -If you wish to are currently developing on mapnik-python and wish to change the code in place and immediately have python changes reflected in your environment. - +Make sure 'mapnik-config' is present and accessible via $PATH env variable ``` -python setup.py develop --uninstall +pip install . -v ``` -Will de-activate the development install by removing the `python-mapnik` entry from `site-packages/easy-install.pth`. - - -``` -python setup.py install -``` - -If you wish to just install the package - ## Testing Once you have installed you can test the package by running: ``` -git submodule update --init -python setup.py test +pytest test/python_tests/ ``` -The test data in `./test/data` and `./test/data-visual` are standalone modules. If you need to update them see https://github.com/mapnik/mapnik/blob/master/docs/contributing.markdown#testing - -### Troubleshooting -If you hit an error like: - -``` -Fatal Python error: PyThreadState_Get: no current thread -Abort trap: 6 -``` - -That means you likely have built python-mapnik is linked against a differ python version than what you are running. To solve this try running: - -``` -/usr/bin/python -``` -If you still hit a problem create an issue and we'll try to help. diff --git a/bootstrap.sh b/bootstrap.sh deleted file mode 100755 index 806f3f567..000000000 --- a/bootstrap.sh +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env bash - -function setup_mason() { - if [[ ! 
-d ./.mason ]]; then - git clone --depth 1 https://github.com/mapbox/mason.git ./.mason - else - echo "Updating to latest mason" - (cd ./.mason && git pull) - fi - export MASON_DIR=$(pwd)/.mason - export PATH=$(pwd)/.mason:$PATH - export CXX=${CXX:-clang++} - export CC=${CXX:-clang++} -} - -function install() { - MASON_PLATFORM_ID=$(mason env MASON_PLATFORM_ID) - if [[ ! -d ./mason_packages/${MASON_PLATFORM_ID}/${1}/ ]]; then - mason install $1 $2 - mason link $1 $2 - fi -} - -function install_mason_deps() { - install mapnik 3.0.0-rc3 - install protobuf 2.6.1 - install freetype 2.5.4 - install harfbuzz 2cd5323 - install jpeg_turbo 1.4.0 - install libxml2 2.9.2 - install libpng 1.6.16 - install webp 0.4.2 - install icu 54.1 - install proj 4.8.0 - install libtiff 4.0.4beta - install boost 1.57.0 - install boost_libsystem 1.57.0 - install boost_libthread 1.57.0 - install boost_libfilesystem 1.57.0 - install boost_libprogram_options 1.57.0 - install boost_libpython 1.57.0 - install boost_libregex 1.57.0 - install boost_libpython 1.57.0 - install pixman 0.32.6 - install cairo 1.12.18 -} - -function setup_runtime_settings() { - local MASON_LINKED_ABS=$(pwd)/mason_packages/.link - export PROJ_LIB=${MASON_LINKED_ABS}/share/proj - export ICU_DATA=${MASON_LINKED_ABS}/share/icu/54.1 - export GDAL_DATA=${MASON_LINKED_ABS}/share/gdal - if [[ $(uname -s) == 'Darwin' ]]; then - export DYLD_LIBRARY_PATH=$(pwd)/mason_packages/.link/lib:${DYLD_LIBRARY_PATH} - else - export LD_LIBRARY_PATH=$(pwd)/mason_packages/.link/lib:${LD_LIBRARY_PATH} - fi - export PATH=$(pwd)/mason_packages/.link/bin:${PATH} -} - -function main() { - setup_mason - install_mason_deps - setup_runtime_settings - echo "Ready, now run:" - echo "" - echo " make test" -} - -main diff --git a/build.py b/build.py deleted file mode 100644 index 0f94826b6..000000000 --- a/build.py +++ /dev/null @@ -1,120 +0,0 @@ -import glob -import os -from subprocess import Popen, PIPE -from distutils import sysconfig - -Import('env') - -def call(cmd, silent=True): - stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate() - if not stderr: - return stdin.strip() - elif not silent: - print stderr - - -prefix = env['PREFIX'] -target_path = os.path.normpath(sysconfig.get_python_lib() + os.path.sep + env['MAPNIK_NAME']) - -py_env = env.Clone() - -py_env.Append(CPPPATH = sysconfig.get_python_inc()) - -py_env.Append(CPPDEFINES = env['LIBMAPNIK_DEFINES']) - -py_env['LIBS'] = [env['MAPNIK_NAME'],'libboost_python'] - -link_all_libs = env['LINKING'] == 'static' or env['RUNTIME_LINK'] == 'static' - -# even though boost_thread is no longer used in mapnik core -# we need to link in for boost_python to avoid missing symbol: _ZN5boost6detail12get_tss_dataEPKv / boost::detail::get_tss_data -py_env.AppendUnique(LIBS = 'boost_thread%s' % env['BOOST_APPEND']) - -if link_all_libs: - py_env.AppendUnique(LIBS=env['LIBMAPNIK_LIBS']) - -# note: on linux -lrt must be linked after thread to avoid: undefined symbol: clock_gettime -if env['RUNTIME_LINK'] == 'static' and env['PLATFORM'] == 'Linux': - py_env.AppendUnique(LIBS='rt') - -# TODO - do solaris/fedora need direct linking too? 
-python_link_flag = '' -if env['PLATFORM'] == 'Darwin': - python_link_flag = '-undefined dynamic_lookup' - -paths = ''' -"""Configuration paths of Mapnik fonts and input plugins (auto-generated by SCons).""" - -from os.path import normpath,join,dirname - -mapniklibpath = '%s' -mapniklibpath = normpath(join(dirname(__file__),mapniklibpath)) -''' - -paths += "inputpluginspath = join(mapniklibpath,'input')\n" - -if env['SYSTEM_FONTS']: - paths += "fontscollectionpath = normpath('%s')\n" % env['SYSTEM_FONTS'] -else: - paths += "fontscollectionpath = join(mapniklibpath,'fonts')\n" - -paths += "__all__ = [mapniklibpath,inputpluginspath,fontscollectionpath]\n" - -if not os.path.exists(env['MAPNIK_NAME']): - os.mkdir(env['MAPNIK_NAME']) - -file('mapnik/paths.py','w').write(paths % (env['MAPNIK_LIB_DIR'])) - -# force open perms temporarily so that `sudo scons install` -# does not later break simple non-install non-sudo rebuild -try: - os.chmod('mapnik/paths.py',0666) -except: pass - -# install the shared object beside the module directory -sources = glob.glob('src/*.cpp') - -if 'install' in COMMAND_LINE_TARGETS: - # install the core mapnik python files, including '__init__.py' - init_files = glob.glob('mapnik/*.py') - if 'mapnik/paths.py' in init_files: - init_files.remove('mapnik/paths.py') - init_module = env.Install(target_path, init_files) - env.Alias(target='install', source=init_module) - # fix perms and install the custom generated 'paths.py' - targetp = os.path.join(target_path,'paths.py') - env.Alias("install", targetp) - # use env.Command rather than env.Install - # to enable setting proper perms on `paths.py` - env.Command( targetp, 'mapnik/paths.py', - [ - Copy("$TARGET","$SOURCE"), - Chmod("$TARGET", 0644), - ]) - -if 'uninstall' not in COMMAND_LINE_TARGETS: - if env['HAS_CAIRO']: - py_env.Append(CPPPATH = env['CAIRO_CPPPATHS']) - py_env.Append(CPPDEFINES = '-DHAVE_CAIRO') - if link_all_libs: - py_env.Append(LIBS=env['CAIRO_ALL_LIBS']) - - if env['HAS_PYCAIRO']: - py_env.Append(CPPDEFINES = '-DHAVE_PYCAIRO') - py_env.Append(CPPPATH = env['PYCAIRO_PATHS']) - -py_env.Append(LINKFLAGS=python_link_flag) -py_env.AppendUnique(LIBS='mapnik-json') -py_env.AppendUnique(LIBS='mapnik-wkt') - -_mapnik = py_env.LoadableModule('mapnik/_mapnik', sources, LDMODULEPREFIX='', LDMODULESUFFIX='.so') - -Depends(_mapnik, env.subst('../../src/%s' % env['MAPNIK_LIB_NAME'])) -Depends(_mapnik, env.subst('../../src/json/libmapnik-json${LIBSUFFIX}')) -Depends(_mapnik, env.subst('../../src/wkt/libmapnik-wkt${LIBSUFFIX}')) - -if 'uninstall' not in COMMAND_LINE_TARGETS: - pymapniklib = env.Install(target_path,_mapnik) - py_env.Alias(target='install',source=pymapniklib) - -env['create_uninstall_target'](env, target_path) diff --git a/demo/data/COPYRIGHT.txt b/demo/data/COPYRIGHT.txt new file mode 100644 index 000000000..4758477d6 --- /dev/null +++ b/demo/data/COPYRIGHT.txt @@ -0,0 +1,3 @@ +© 2003. 
Government of Canada with permission from Natural Resources Canada + +Original License: http://geogratis.cgdi.gc.ca/geogratis/en/licence.jsp diff --git a/demo/data/boundaries.dbf b/demo/data/boundaries.dbf new file mode 100644 index 000000000..04d2ed661 Binary files /dev/null and b/demo/data/boundaries.dbf differ diff --git a/demo/data/boundaries.shp b/demo/data/boundaries.shp new file mode 100644 index 000000000..48fcfdb87 Binary files /dev/null and b/demo/data/boundaries.shp differ diff --git a/demo/data/boundaries.shx b/demo/data/boundaries.shx new file mode 100644 index 000000000..9a6bd46d7 Binary files /dev/null and b/demo/data/boundaries.shx differ diff --git a/demo/data/boundaries_l.dbf b/demo/data/boundaries_l.dbf new file mode 100644 index 000000000..44d4ca8e1 Binary files /dev/null and b/demo/data/boundaries_l.dbf differ diff --git a/demo/data/boundaries_l.shp b/demo/data/boundaries_l.shp new file mode 100644 index 000000000..9951fe8ff Binary files /dev/null and b/demo/data/boundaries_l.shp differ diff --git a/demo/data/boundaries_l.shx b/demo/data/boundaries_l.shx new file mode 100644 index 000000000..d0f18f8a9 Binary files /dev/null and b/demo/data/boundaries_l.shx differ diff --git a/demo/data/ontdrainage.dbf b/demo/data/ontdrainage.dbf new file mode 100644 index 000000000..819265027 Binary files /dev/null and b/demo/data/ontdrainage.dbf differ diff --git a/demo/data/ontdrainage.shp b/demo/data/ontdrainage.shp new file mode 100644 index 000000000..758d37ed0 Binary files /dev/null and b/demo/data/ontdrainage.shp differ diff --git a/demo/data/ontdrainage.shx b/demo/data/ontdrainage.shx new file mode 100644 index 000000000..9f11f920e Binary files /dev/null and b/demo/data/ontdrainage.shx differ diff --git a/demo/data/popplaces.dbf b/demo/data/popplaces.dbf new file mode 100644 index 000000000..d385b6f1d Binary files /dev/null and b/demo/data/popplaces.dbf differ diff --git a/demo/data/popplaces.shp b/demo/data/popplaces.shp new file mode 100644 index 000000000..3c3f7ac44 Binary files /dev/null and b/demo/data/popplaces.shp differ diff --git a/demo/data/popplaces.shx b/demo/data/popplaces.shx new file mode 100644 index 000000000..f36ff6b4c Binary files /dev/null and b/demo/data/popplaces.shx differ diff --git a/demo/data/qcdrainage.dbf b/demo/data/qcdrainage.dbf new file mode 100644 index 000000000..04fcf8541 Binary files /dev/null and b/demo/data/qcdrainage.dbf differ diff --git a/demo/data/qcdrainage.shp b/demo/data/qcdrainage.shp new file mode 100644 index 000000000..ce685f8bb Binary files /dev/null and b/demo/data/qcdrainage.shp differ diff --git a/demo/data/qcdrainage.shx b/demo/data/qcdrainage.shx new file mode 100644 index 000000000..295f57005 Binary files /dev/null and b/demo/data/qcdrainage.shx differ diff --git a/demo/data/roads.dbf b/demo/data/roads.dbf new file mode 100644 index 000000000..709f88182 Binary files /dev/null and b/demo/data/roads.dbf differ diff --git a/demo/data/roads.shp b/demo/data/roads.shp new file mode 100644 index 000000000..6f7e2a2bf Binary files /dev/null and b/demo/data/roads.shp differ diff --git a/demo/data/roads.shx b/demo/data/roads.shx new file mode 100644 index 000000000..5d4b156fa Binary files /dev/null and b/demo/data/roads.shx differ diff --git a/demo/python/README.txt b/demo/python/README.txt new file mode 100644 index 000000000..4a13579cb --- /dev/null +++ b/demo/python/README.txt @@ -0,0 +1,20 @@ +This directory contains a sample python script implementing the Mapnik API. 
+ +The script is thoroughly commented and also acts as a mini tutorial. Reading +it should get you on your way, and you can use it as a base for your work. + +You must compile and install mapnik and the python bindings FIRST. + +Once this is done, run it: + +/path/to/python rundemo.py + +If all goes well, it should render several map images, including: + +demo.png +demo_high.jpg + +Have a look! + +Cheers, +J.F. diff --git a/demo/python/rundemo.py b/demo/python/rundemo.py new file mode 100755 index 000000000..057dc0ecd --- /dev/null +++ b/demo/python/rundemo.py @@ -0,0 +1,389 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# +# This file is part of Mapnik (c++ mapping toolkit) +# Copyright (C) 2005 Jean-Francois Doyon +# Copyright (C) 2024 Artem Pavlenko + +# Mapnik is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA + +import sys +from os import path +import mapnik + +# Instantiate a map, giving it a width and height. Remember: the word "map" is +# reserved in Python! :) + +root = path.dirname(__file__) +m = mapnik.Map(800,600,"+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs") + +# Set its background colour. More on colours later ... + +m.background = 'white' #Color(R=255,G=255,B=255,A=255) + +# Now we can start adding layers, in stacking order (i.e. bottom layer first) + +# Canadian Provinces (Polygons) + +# Instantiate a layer. The parameters depend on the type of data: +# shape: +# type='shape' +# file='/path/to/shape' +# raster: +# type='raster' +# file='/path/to/raster' +# postgis: +# type='postgis' +# host='127.0.0.1' +# dbname='mydatabase' +# user='myusername' +# password='mypassword' +# table= TODO + +provpoly_lyr = mapnik.Layer('Provinces') +provpoly_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs" +provpoly_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/boundaries')) + +# We then define a style for the layer. A layer can have one or many styles. +# Styles are named, so they can be shared across different layers. +# Multiple styles per layer behave functionally like multiple layers. The +# data is completely re-scanned for each style within one layer, and a style +# will be drawn entirely "above" the previous one. Performance-wise, using +# multiple styles in one layer is the same as having multiple layers. +# The paradigm is useful mostly as a convenience. + +provpoly_style = mapnik.Style() + +# A Style needs one or more rules. A rule will normally consist of a filter +# for feature selection, and one or more symbolizers. + +provpoly_rule_on = mapnik.Rule() + +# An Expression() allows the selection of features to which the symbology will +# be applied. More on Mapnik expressions can be found in Tutorial #2. +# A given feature can only match one filter per rule per style.
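+# For example, expressions reference attribute columns in square brackets and can
+# combine tests with 'and', 'or' and 'not', as in the
+# mapnik.Expression('([CLASS] = 3) or ([CLASS] = 4)') filter used further below.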
+ +provpoly_rule_on.filter = mapnik.Expression("[NAME_EN] = 'Ontario'") + +# Here a symbolizer is defined. Available are: +# - LineSymbolizer(Color(),) +# - LineSymbolizer(Stroke()) +# - PolygonSymbolizer(Color()) +# - PointSymbolizer(,,,) + +# Some of them can accept a Color() instance, which can be created with: +# - Color(, , ) +# - Color(, , , ) +# - Color() where will be something like '#00FF00' +# or '#0f0' or 'green' + +sym = mapnik.PolygonSymbolizer() +sym.fill = mapnik.Color(250, 190, 183); +provpoly_rule_on.symbolizers.append(sym) +provpoly_style.rules.append(provpoly_rule_on) + +provpoly_rule_qc = mapnik.Rule() +provpoly_rule_qc.filter = mapnik.Expression("[NOM_FR] = 'Québec'") +sym = mapnik.PolygonSymbolizer() +sym.fill = 'rgb(217, 235, 203)' +provpoly_rule_qc.symbolizers.append(sym) +provpoly_style.rules.append(provpoly_rule_qc) + +# Add the style to the map, giving it a name. This is the name that will be +# used to refer to it from here on. Having named styles allows them to be +# re-used throughout the map. + +m.append_style('provinces', provpoly_style) + +# Then associate the style to the layer itself. + +provpoly_lyr.styles.append('provinces') + +# Then add the layer to the map. In reality, it's the order in which you +# append them to the map that will determine the drawing order, though by +# convention it is recommended to define them in drawing order as well. + +m.layers.append(provpoly_lyr) + +# Drainage + +# A simple example ... + +qcdrain_lyr = mapnik.Layer('Quebec Hydrography') +qcdrain_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs" +qcdrain_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/qcdrainage')) + +qcdrain_style = mapnik.Style() +qcdrain_rule = mapnik.Rule() +qcdrain_rule.filter = mapnik.Expression('[HYC] = 8') +sym = mapnik.PolygonSymbolizer() +sym.fill = 'rgba(153, 204, 255, 255)' +sym.smooth = 1.0 # very smooth +qcdrain_rule.symbolizers.append(sym) +qcdrain_style.rules.append(qcdrain_rule) + +m.append_style('drainage', qcdrain_style) +qcdrain_lyr.styles.append('drainage') +m.layers.append(qcdrain_lyr) + +# In this case, we have 2 data sets with similar schemas (same filtering +# attributes, and same desired style), so we're going to +# re-use the style defined in the above layer for the next one. + +ondrain_lyr = mapnik.Layer('Ontario Hydrography') +ondrain_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs" +ondrain_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/ontdrainage')) + +ondrain_lyr.styles.append('drainage') +m.layers.append(ondrain_lyr) + +# Provincial boundaries + +provlines_lyr = mapnik.Layer('Provincial borders') +provlines_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs" +provlines_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/boundaries_l')) + +# Here we define a "dash dot dot dash" pattern for the provincial boundaries. 
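+# The stroke_dasharray value set a few lines below alternates dash and gap lengths
+# in pixels (8 on, 4 off, 2 on, 2 off, 2 on, 2 off), which draws the dash-dot-dot-dash pattern.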
+ +provlines_style = mapnik.Style() +provlines_rule = mapnik.Rule() +sym = mapnik.LineSymbolizer() +# FIXME - currently adding dash arrays is broken +# https://github.com/mapnik/mapnik/issues/2324 +sym.stroke = 'black' +sym.stroke_width = 1 +sym.stroke_dasharray="8 4 2 2 2 2" +provlines_rule.symbolizers.append(sym) +provlines_style.rules.append(provlines_rule) + +m.append_style('provlines', provlines_style) +provlines_lyr.styles.append('provlines') +m.layers.append(provlines_lyr) + +# Roads 3 and 4 (The "grey" roads) + +roads34_lyr = mapnik.Layer('Roads') +roads34_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs" +# create roads datasource (we're going to re-use it later) + +roads34_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/roads')) + +roads34_style = mapnik.Style() +roads34_rule = mapnik.Rule() +roads34_rule.filter = mapnik.Expression('([CLASS] = 3) or ([CLASS] = 4)') + +# With lines of a certain width, you can control how the ends +# are closed off using line_cap as below. + +# Available options are: +# line_cap: BUTT_CAP, SQUARE_CAP, ROUND_CAP +# line_join: MITER_JOIN, MITER_REVERT_JOIN, ROUND_JOIN, BEVEL_JOIN + +# And one last Stroke() attribute not used here is "opacity", which +# can be set to a numerical value. + +sym = mapnik.LineSymbolizer() +sym.stroke = mapnik.Color(171,158,137) +sym.stroke_width = 2 +sym.stroke_linecap = mapnik.stroke_linecap.ROUND_CAP + +roads34_rule.symbolizers.append(sym) +roads34_style.rules.append(roads34_rule) + +m.append_style('smallroads', roads34_style) +roads34_lyr.styles.append('smallroads') +m.layers.append(roads34_lyr) + +# Roads 2 (The thin yellow ones) + +roads2_lyr = mapnik.Layer('Roads') +roads2_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs" +# Just get a copy from roads34_lyr +roads2_lyr.datasource = roads34_lyr.datasource + +roads2_style_1 = mapnik.Style() +roads2_rule_1 = mapnik.Rule() +roads2_rule_1.filter = mapnik.Expression('[CLASS] = 2') + +sym = mapnik.LineSymbolizer() +sym.stroke = 'rgb(171,158,137)' #mapnik.Color(R=171,G=158,B=137,A=255) +sym.stroke_width = 4 +sym.stroke_linecap = mapnik.stroke_linecap.ROUND_CAP +roads2_rule_1.symbolizers.append(sym) +roads2_style_1.rules.append(roads2_rule_1) + +m.append_style('road-border', roads2_style_1) + +roads2_style_2 = mapnik.Style() +roads2_rule_2 = mapnik.Rule() +roads2_rule_2.filter = mapnik.Expression('[CLASS] = 2') +sym = mapnik.LineSymbolizer() +sym.stroke = 'rgb(100%,98%,45%)' #mapnik.Color(R=255,G=250,B=115,A=255) +sym.stroke_linecap = mapnik.stroke_linecap.ROUND_CAP +sym.stroke_width = 2 +roads2_rule_2.symbolizers.append(sym) +roads2_style_2.rules.append(roads2_rule_2) + +m.append_style('road-fill', roads2_style_2) + +roads2_lyr.styles.append('road-border') +roads2_lyr.styles.append('road-fill') + +m.layers.append(roads2_lyr) + +# Roads 1 (The big orange ones, the highways) + +roads1_lyr = mapnik.Layer('Roads') +roads1_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs" +roads1_lyr.datasource = roads34_lyr.datasource + +roads1_style_1 = mapnik.Style() +roads1_rule_1 = mapnik.Rule() +roads1_rule_1.filter = mapnik.Expression('[CLASS] = 1') +sym = mapnik.LineSymbolizer() +sym.stroke = mapnik.Color(188,149,28) +sym.stroke_linecap = mapnik.stroke_linecap.ROUND_CAP +sym.stroke_width = 7 +roads1_rule_1.symbolizers.append(sym) +roads1_style_1.rules.append(roads1_rule_1) 
+m.append_style('highway-border', roads1_style_1) + +roads1_style_2 = mapnik.Style() +roads1_rule_2 = mapnik.Rule() +roads1_rule_2.filter = mapnik.Expression('[CLASS] = 1') +sym.stroke = mapnik.Color(242,191,36) +sym.stroke_linecap = mapnik.stroke_linecap.ROUND_CAP +sym.stroke_width = 5 +roads1_rule_2.symbolizers.append(sym) +roads1_style_2.rules.append(roads1_rule_2) + +m.append_style('highway-fill', roads1_style_2) + +roads1_lyr.styles.append('highway-border') +roads1_lyr.styles.append('highway-fill') + +m.layers.append(roads1_lyr) + +# Populated Places + +popplaces_lyr = mapnik.Layer('Populated Places') +popplaces_lyr.srs = "+proj=lcc +ellps=GRS80 +lat_0=49 +lon_0=-95 +lat+1=49 +lat_2=77 +datum=NAD83 +units=m +no_defs" +popplaces_lyr.datasource = mapnik.Shapefile(file=path.join(root,'../data/popplaces')) + +popplaces_style = mapnik.Style() +popplaces_rule = mapnik.Rule() + +# And here we have a TextSymbolizer, used for labeling. +# The first parameter is the name of the attribute to use as the source of the +# text to label with. Then there is font size in points (I think?), and colour. + +# TODO - currently broken: https://github.com/mapnik/mapnik/issues/2324 + +popplaces_text_sym = mapnik.TextSymbolizer() + +popplaces_text_sym.placement_finder = mapnik.PlacementFinder() +popplaces_text_sym.placement_finder.face_name = 'DejaVu Sans Book' +popplaces_text_sym.placement_finder.text_size = 10 +popplaces_text_sym.placement_finder.halo_fill = 'rgba(100%,100%,78.5%,1.0)' #mapnik.Color(R=255,G=255,B=200,A=255) +popplaces_text_sym.placement_finder.halo_radius = 1.0 +popplaces_text_sym.placement_finder.fill = "black" +popplaces_text_sym.placement_finder.format_expression = "[GEONAME]" + + +# We set a "halo" around the text, which looks like an outline if thin enough, +# or an outright background if large enough. 
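+# Note: in this script the halo comes from the placement_finder.halo_fill and
+# placement_finder.halo_radius values assigned above; the attribute-style settings
+# below are left commented out.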
+#popplaces_text_sym.label_placement= mapnik.label_placement.POINT_PLACEMENT +#popplaces_text_sym.halo_fill = mapnik.Color(255,255,200) +#popplaces_text_sym.halo_radius = 1 +#popplaces_text_sym.avoid_edges = True +#popplaces_text_sym.minimum_padding = 30 + +popplaces_rule.symbolizers.append(popplaces_text_sym) + +popplaces_style.rules.append(popplaces_rule) + +m.append_style('popplaces', popplaces_style) +popplaces_lyr.styles.append('popplaces') +m.layers.append(popplaces_lyr) + +# Draw map + +# Set the initial extent of the map in 'master' spherical Mercator projection +m.zoom_to_box(mapnik.Box2d(-8024477.28459,5445190.38849,-7381388.20071,5662941.44855)) + +# Render map +im = mapnik.Image(m.width,m.height) +mapnik.render(m, im) + +# Save image to files +images_ = [] +if mapnik.has_png(): + im.save('demo.png', 'png') # true-colour RGBA + images_.append('demo.png') + + # old behavior, now can do 'png8:c=256' + im.save('demo256.png', 'png256') # save to palette based (max 256 colours) png + images_.append('demo256.png') + + im.save('demo64_binary_transparency.png', 'png8:c=64:t=1') + images_.append('demo64_binary_transparency.png') + + im.save('demo128_colors_hextree_no_alpha.png', 'png8:c=100:m=h:t=0') + images_.append('demo128_colors_hextree_no_alpha.png') + +if mapnik.has_jpeg(): + im.save('demo_high.jpg', 'jpeg100') + images_.append('demo_high.jpg') + + im.save('demo_low.jpg', 'jpeg50') + images_.append('demo_low.jpg') + +if mapnik.has_tiff(): + im.save('demo.tif', 'tiff') + images_.append('demo.tif') + +if mapnik.has_webp(): + im.save('demo.webp', 'webp') # default quality is 90 + images_.append('demo.webp') + + im.save('demo_highest.webp', 'webp:quality=100') + images_.append('demo_highest.webp') + + im.save('demo_low.webp', 'webp:quality=50') + images_.append('demo_low.webp') + + +# Render cairo examples +if mapnik.has_cairo(): + mapnik.render_to_file(m,'demo.pdf') + images_.append('demo.pdf') + mapnik.render_to_file(m,'demo.ps') + images_.append('demo.ps') + mapnik.render_to_file(m,'demo.svg') + images_.append('demo.svg') + mapnik.render_to_file(m,'demo_cairo_rgb24.png','RGB24') + images_.append('demo_cairo_rgb24.png') + mapnik.render_to_file(m,'demo_cairo_argb32.png','ARGB32') + images_.append('demo_cairo_argb32.png') + +print ("\n\n", len(images_), "maps have been rendered in the current directory:") + +for im_ in images_: + print ("-", im_) + +print ("\n\nHave a look!\n\n") + +mapnik.save_map(m,"map.xml") diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 000000000..70a77158f --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1,238 @@ +# Getting started with Python bindings + +## Overview + +This tutorial will help you confirm that Mapnik and its Python bindings are properly installed and will introduce you to some of the basic programming concepts for Mapnik. + +## Step 1: check installation + +Make sure you have mapnik installed. You should be able to open a terminal and type: + +```sh +mapnik-config -v # should return a version number. +``` + +Next test the Python bindings. You should be able to open a terminal and type: + +```sh +python -c "import mapnik; print(mapnik.__file__)" # should return the path to the python bindings and no errors +``` + +If the above does not work (e.g. throws an `ImportError`) then please go back and ensure [Mapnik](https://github.com/mapnik/mapnik/wiki/Mapnik-Installation) and the [Mapnik Python bindings](/README.md) are properly installed. +## Step 2 + +Now, we need some data to render.
Let's use a shapefile of world border polygons from [naturalearthdata.com](http://naturalearthdata.com) ([direct link](http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/cultural/ne_110m_admin_0_countries.zip)). Unzip the archive in an easily accessible location of your choosing. In *Step 3* we will be referencing the path to this shapefile in Python code, so make sure you know where you put it. + +Once unzipped, you should see four files like: + +```sh +ne_110m_admin_0_countries.shp +ne_110m_admin_0_countries.shx +ne_110m_admin_0_countries.dbf +ne_110m_admin_0_countries.prj +``` + +To download and unzip on the command line, do: + +```sh +wget http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/cultural/ne_110m_admin_0_countries.zip +unzip ne_110m_admin_0_countries.zip +``` + +## Step 3 + +Now we're going to program in Python and Mapnik, using sample code and the Python interpreter. + +The idea here is not that you have to interact with Mapnik via Python, but that this is a good way to build foundational skills for how Mapnik works. + +So, let's begin! Open a Python interpreter simply by typing in your terminal: + +```sh +python +``` + +The code below can be pasted into your interpreter. Ideally paste line by line so you can confirm each step is working. The commented lines (#) should be able to be pasted without trouble, but depending on your interpreter settings they may cause errors. + +### Import Mapnik + +Import the Mapnik Python bindings: + +```python +import mapnik +``` + +### Create a Map + +```python +m = mapnik.Map(600,300) # create a map with a given width and height in pixels +# note: m.srs will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' +# the 'map.srs' is the target projection of the map and can be whatever you wish +m.background = mapnik.Color('steelblue') # set background colour to 'steelblue'. +``` + +### Create a Style + +Create the Style which determines how the data is rendered: + +```python +s = mapnik.Style() # style object to hold rules +r = mapnik.Rule() # rule object to hold symbolizers +# to fill a polygon we create a PolygonSymbolizer +polygon_symbolizer = mapnik.PolygonSymbolizer() +polygon_symbolizer.fill = mapnik.Color('#f2eff9') +r.symbols.append(polygon_symbolizer) # add the symbolizer to the rule object + +# to add outlines to a polygon we create a LineSymbolizer +line_symbolizer = mapnik.LineSymbolizer() +line_symbolizer.stroke = mapnik.Color('rgb(50%,50%,50%)') +line_symbolizer.stroke_width = 0.1 +r.symbols.append(line_symbolizer) # add the symbolizer to the rule object +s.rules.append(r) # now add the rule to the style and we're done +``` + +And add the Style to the Map: + +```python +m.append_style('My Style',s) # Styles are given names only as they are applied to the map +``` + +### Create a Datasource + +In *Step 2* above you should have downloaded a sample shapefile of polygons of world countries. We are now going to load that into a `mapnik.Datasource` object in Python.
If your Python interpreter was launched from the same directory as you downloaded the natural earth shapefile to, you should be able to use a relative path to create the datasource like: + +``` python +ds = mapnik.Shapefile(file='ne_110m_admin_0_countries.shp') +``` + +Otherwise use an absolute path (exchanging `/Users/dane/Downloads/` for the correct path on your machine): + +``` python +ds = mapnik.Shapefile(file='/Users/dane/Downloads/ne_110m_admin_0_countries.shp') +``` + +Note: optionally (to learn about your data) you can call the `envelope()` function on the datasource object to see the full coordinate bounds of the data: + +``` python +>>> ds.envelope() +Box2d(-180.0,-90.0,180.0,83.64513) +``` + +That shows the minx, miny, maxx, and maxy of the data. Because the above coordinates are between -180 and 180 for the x or longitude values and -90 and 90 for the y or latitude values, we know this data is in *geographic* coordinates and uses degrees for units - a pretty good indication this is `WGS84 (aka EPSG:4326)`. This specific shapefile also stores this projection information as a `WKT` string in the `ne_110m_admin_0_countries.prj` file. See the `layer.srs` value below for why this matters. + + +### Create a Layer + +Mapnik Layers are basically containers around datasources that also store useful properties. Let's now create a Layer object and add the datasource to it. + +``` python +layer = mapnik.Layer('world') # new layer called 'world' (we could name it anything) +# note: layer.srs will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' +``` + +Note: the 'layer.srs' is the source projection of the Datasource and *must* match the projection of the coordinates of that data or else your map will likely be blank. Mapnik uses [Proj.4](http://trac.osgeo.org/proj/wiki/FAQ) strings to specify the spatial reference system. In this case, the default `srs` Mapnik assumes (`+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs`) happens to match the projection of the data. When this is not the case you must set the layer.srs to the correct value (which is beyond the scope of this tutorial). + +Now attach the datasource to the layer: + +```python +layer.datasource = ds +``` + +Lastly, we need to make sure the style we created above (and attached to the map) is also applied to the layer, by its string reference: + +```python +layer.styles.append('My Style') +``` + +### Prepare the Map for rendering + +This step is critical. Finally, add the layer to the map and zoom to the full extent of the data layer (using `zoom_all` which will calculate the cumulative extent of all layers attached to the map). If you do not zoom the Map to the extent of the layer(s), then the rendered output will be blank. + +```python +m.layers.append(layer) +m.zoom_all() +``` + +### Render your map + +Finish up by rendering your map image: + +```python +# Write the data to a png image called world.png in the current directory +mapnik.render_to_file(m,'world.png', 'png') + +# Exit the Python interpreter +exit() # or ctrl-d +``` + +Then back in your normal shell type: + +```sh +# On a mac +open world.png +# On windows +start world.png +``` + +Or navigate to your base directory and open `world.png` and the result should look like this: + +![The world map](images/world.png) + +## Step 4 + +The next logical step is to run that same code all at once as a Python script from your shell/terminal (rather than pasted into the Python interpreter line-by-line).
This way you will be able to modify and experiment with the settings, then simply re-run the script. + +So, create a blank text file called `world.py`. + +Make it executable: + + chmod +x world.py + +Then add a line at the top of the script like: + +```sh +#!/usr/bin/env python +``` + +Finally, append the entire text below and save the file. + +```python +import mapnik +m = mapnik.Map(600,300) +m.background = mapnik.Color('steelblue') +s = mapnik.Style() +r = mapnik.Rule() +polygon_symbolizer = mapnik.PolygonSymbolizer() +polygon_symbolizer.fill = mapnik.Color('#f2eff9') +r.symbols.append(polygon_symbolizer) + +line_symbolizer = mapnik.LineSymbolizer() +line_symbolizer.stroke = mapnik.Color('rgb(50%,50%,50%)') +line_symbolizer.stroke_width = 0.1 + +r.symbols.append(line_symbolizer) +s.rules.append(r) +m.append_style('My Style',s) +ds = mapnik.Shapefile(file='ne_110m_admin_0_countries.shp') +layer = mapnik.Layer('world') +layer.datasource = ds +layer.styles.append('My Style') +m.layers.append(layer) +m.zoom_all() +mapnik.render_to_file(m,'world.png', 'png') +print("rendered image to 'world.png'") +``` + + * Don't forget to ensure the correct path to your `ne_110m_admin_0_countries.shp` shapefile. + * Mapnik accepts both the absolute path to your data as well as the relative path (the same goes for the path where you want to save your file) + +Then run the script with the command: + + +```sh +./world.py # You must be in the same directory as you saved the script +``` + + * Note: if you re-run this script it will overwrite the world.png map. + * Now you can easily open the script in a separate text editor and try changing the dimensions, colors, or datasource (remember to use the correct `srs` if you change the datasource). diff --git a/docs/images/world.png b/docs/images/world.png new file mode 100644 index 000000000..1f4156f9a Binary files /dev/null and b/docs/images/world.png differ diff --git a/mapnik/__init__.py b/mapnik/__init__.py deleted file mode 100644 index 3eef555bb..000000000 --- a/mapnik/__init__.py +++ /dev/null @@ -1,1073 +0,0 @@ -# -# This file is part of Mapnik (C++/Python mapping toolkit) -# Copyright (C) 2014 Artem Pavlenko -# -# Mapnik is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# - -"""Mapnik Python module. - -Boost Python bindings to the Mapnik C++ shared library. - -Several things happen when you do: - - >>> import mapnik - - 1) Mapnik C++ objects are imported via the '__init__.py' from the '_mapnik.so' shared object - (_mapnik.pyd on win) which references libmapnik.so (linux), libmapnik.dylib (mac), or - mapnik.dll (win32). - - 2) The paths to the input plugins and font directories are imported from the 'paths.py' - file which was constructed and installed during SCons installation. - - 3) All available input plugins and TrueType fonts are automatically registered.
- - 4) Boost Python metaclass injectors are used in the '__init__.py' to extend several - objects adding extra convenience when accessed via Python. - -""" - -import itertools -import os -import warnings -try: - import json -except ImportError: - import simplejson as json - -def bootstrap_env(): - """ - If an optional settings file exists, inherit its - environment settings before loading the mapnik library. - - This feature is intended for customized packages of mapnik. - - The settings file should be a python file with an 'env' variable - that declares a dictionary of key:value pairs to push into the - global process environment, if not already set, like: - - env = {'ICU_DATA':'/usr/local/share/icu/'} - """ - if os.path.exists(os.path.join(os.path.dirname(__file__),'mapnik_settings.py')): - from mapnik_settings import env - process_keys = os.environ.keys() - for key, value in env.items(): - if key not in process_keys: - os.environ[key] = value - -bootstrap_env() - -from _mapnik import * - -import printing -printing.renderer = render - -# The base Boost.Python class -BoostPythonMetaclass = Coord.__class__ - -class _MapnikMetaclass(BoostPythonMetaclass): - def __init__(self, name, bases, dict): - for b in bases: - if type(b) not in (self, type): - for k,v in list(dict.items()): - if hasattr(b, k): - setattr(b, '_c_'+k, getattr(b, k)) - setattr(b,k,v) - return type.__init__(self, name, bases, dict) - -# metaclass injector compatible with both python 2 and 3 -# http://mikewatkins.ca/2008/11/29/python-2-and-3-metaclasses/ -_injector = _MapnikMetaclass('_injector', (object, ), {}) - -def Filter(*args,**kwargs): - warnings.warn("'Filter' is deprecated and will be removed in Mapnik 3.x, use 'Expression' instead", - DeprecationWarning, 2) - return Expression(*args, **kwargs) - -class Envelope(Box2d): - def __init__(self, *args, **kwargs): - warnings.warn("'Envelope' is deprecated and will be removed in Mapnik 3.x, use 'Box2d' instead", - DeprecationWarning, 2) - Box2d.__init__(self, *args, **kwargs) - -class _Coord(Coord,_injector): - """ - Represents a point with two coordinates (either lon/lat or x/y). - - Following operators are defined for Coord: - - Addition and subtraction of Coord objects: - - >>> Coord(10, 10) + Coord(20, 20) - Coord(30.0, 30.0) - >>> Coord(10, 10) - Coord(20, 20) - Coord(-10.0, -10.0) - - Addition, subtraction, multiplication and division between - a Coord and a float: - - >>> Coord(10, 10) + 1 - Coord(11.0, 11.0) - >>> Coord(10, 10) - 1 - Coord(-9.0, -9.0) - >>> Coord(10, 10) * 2 - Coord(20.0, 20.0) - >>> Coord(10, 10) / 2 - Coord(5.0, 5.0) - - Equality of coords (as pairwise equality of components): - >>> Coord(10, 10) is Coord(10, 10) - False - >>> Coord(10, 10) == Coord(10, 10) - True - """ - def __repr__(self): - return 'Coord(%s,%s)' % (self.x, self.y) - - def forward(self, projection): - """ - Projects the point from the geographic coordinate - space into the cartesian space. The x component is - considered to be longitude, the y component the - latitude. - - Returns the easting (x) and northing (y) as a - coordinate pair. - - Example: Project the geographic coordinates of the - city center of Stuttgart into the local - map projection (GK Zone 3/DHDN, EPSG 31467) - >>> p = Projection('+init=epsg:31467') - >>> Coord(9.1, 48.7).forward(p) - Coord(3507360.12813,5395719.2749) - """ - return forward_(self, projection) - - def inverse(self, projection): - """ - Projects the point from the cartesian space - into the geographic space. 
The x component is - considered to be the easting, the y component - to be the northing. - - Returns the longitude (x) and latitude (y) as a - coordinate pair. - - Example: Project the cartesian coordinates of the - city center of Stuttgart in the local - map projection (GK Zone 3/DHDN, EPSG 31467) - into geographic coordinates: - >>> p = Projection('+init=epsg:31467') - >>> Coord(3507360.12813,5395719.2749).inverse(p) - Coord(9.1, 48.7) - """ - return inverse_(self, projection) - -class _Box2d(Box2d,_injector): - """ - Represents a spatial envelope (i.e. bounding box). - - - Following operators are defined for Box2d: - - Addition: - e1 + e2 is equvalent to e1.expand_to_include(e2) but yields - a new envelope instead of modifying e1 - - Subtraction: - Currently e1 - e2 returns e1. - - Multiplication and division with floats: - Multiplication and division change the width and height of the envelope - by the given factor without modifying its center.. - - That is, e1 * x is equivalent to: - e1.width(x * e1.width()) - e1.height(x * e1.height()), - except that a new envelope is created instead of modifying e1. - - e1 / x is equivalent to e1 * (1.0/x). - - Equality: two envelopes are equal if their corner points are equal. - """ - - def __repr__(self): - return 'Box2d(%s,%s,%s,%s)' % \ - (self.minx,self.miny,self.maxx,self.maxy) - - def forward(self, projection): - """ - Projects the envelope from the geographic space - into the cartesian space by projecting its corner - points. - - See also: - Coord.forward(self, projection) - """ - return forward_(self, projection) - - def inverse(self, projection): - """ - Projects the envelope from the cartesian space - into the geographic space by projecting its corner - points. - - See also: - Coord.inverse(self, projection). - """ - return inverse_(self, projection) - -class _Projection(Projection,_injector): - - def __repr__(self): - return "Projection('%s')" % self.params() - - def forward(self,obj): - """ - Projects the given object (Box2d or Coord) - from the geographic space into the cartesian space. - - See also: - Box2d.forward(self, projection), - Coord.forward(self, projection). - """ - return forward_(obj,self) - - def inverse(self,obj): - """ - Projects the given object (Box2d or Coord) - from the cartesian space into the geographic space. - - See also: - Box2d.inverse(self, projection), - Coord.inverse(self, projection). 
- """ - return inverse_(obj,self) - -class _Feature(Feature,_injector): - __geo_interface__ = property(lambda self: json.loads(self.to_geojson())) - -class _Geometry(Geometry,_injector): - __geo_interface__ = property(lambda self: json.loads(self.to_geojson())) - -class _Datasource(Datasource,_injector): - - def all_features(self,fields=None,variables={}): - query = Query(self.envelope()) - query.set_variables(variables); - attributes = fields or self.fields() - for fld in attributes: - query.add_property_name(fld) - return self.features(query).features - - def featureset(self,fields=None,variables={}): - query = Query(self.envelope()) - query.set_variables(variables); - attributes = fields or self.fields() - for fld in attributes: - query.add_property_name(fld) - return self.features(query) - -class _Color(Color,_injector): - def __repr__(self): - return "Color(R=%d,G=%d,B=%d,A=%d)" % (self.r,self.g,self.b,self.a) - -class _SymbolizerBase(SymbolizerBase,_injector): - # back compatibility - @property - def filename(self): - return self['file'] - - @filename.setter - def filename(self, val): - self['file'] = val - -def _add_symbol_method_to_symbolizers(vars=globals()): - - def symbol_for_subcls(self): - return self - - def symbol_for_cls(self): - return getattr(self,self.type())() - - for name, obj in vars.items(): - if name.endswith('Symbolizer') and not name.startswith('_'): - if name == 'Symbolizer': - symbol = symbol_for_cls - else: - symbol = symbol_for_subcls - type('dummy', (obj,_injector), {'symbol': symbol}) -_add_symbol_method_to_symbolizers() - -def Datasource(**keywords): - """Wrapper around CreateDatasource. - - Create a Mapnik Datasource using a dictionary of parameters. - - Keywords must include: - - type='plugin_name' # e.g. type='gdal' - - See the convenience factory methods of each input plugin for - details on additional required keyword arguments. - - """ - - return CreateDatasource(keywords) - -# convenience factory methods - -def Shapefile(**keywords): - """Create a Shapefile Datasource. - - Required keyword arguments: - file -- path to shapefile without extension - - Optional keyword arguments: - base -- path prefix (default None) - encoding -- file encoding (default 'utf-8') - - >>> from mapnik import Shapefile, Layer - >>> shp = Shapefile(base='/home/mapnik/data',file='world_borders') - >>> lyr = Layer('Shapefile Layer') - >>> lyr.datasource = shp - - """ - keywords['type'] = 'shape' - return CreateDatasource(keywords) - -def CSV(**keywords): - """Create a CSV Datasource. 
- - Required keyword arguments: - file -- path to csv - - Optional keyword arguments: - inline -- inline CSV string (if provided 'file' argument will be ignored and non-needed) - base -- path prefix (default None) - encoding -- file encoding (default 'utf-8') - row_limit -- integer limit of rows to return (default: 0) - strict -- throw an error if an invalid row is encountered - escape -- The escape character to use for parsing data - quote -- The quote character to use for parsing data - separator -- The separator character to use for parsing data - headers -- A comma separated list of header names that can be set to add headers to data that lacks them - filesize_max -- The maximum filesize in MB that will be accepted - - >>> from mapnik import CSV - >>> csv = CSV(file='test.csv') - - >>> from mapnik import CSV - >>> csv = CSV(inline='''wkt,Name\n"POINT (120.15 48.47)","Winthrop, WA"''') - - For more information see https://github.com/mapnik/mapnik/wiki/CSV-Plugin - - """ - keywords['type'] = 'csv' - return CreateDatasource(keywords) - -def GeoJSON(**keywords): - """Create a GeoJSON Datasource. - - Required keyword arguments: - file -- path to json - - Optional keyword arguments: - encoding -- file encoding (default 'utf-8') - base -- path prefix (default None) - - >>> from mapnik import GeoJSON - >>> geojson = GeoJSON(file='test.json') - - """ - keywords['type'] = 'geojson' - return CreateDatasource(keywords) - -def PostGIS(**keywords): - """Create a PostGIS Datasource. - - Required keyword arguments: - dbname -- database name to connect to - table -- table name or subselect query - - *Note: if using subselects for the 'table' value consider also - passing the 'geometry_field' and 'srid' and 'extent_from_subquery' - options and/or specifying the 'geometry_table' option. 
- - Optional db connection keyword arguments: - user -- database user to connect as (default: see postgres docs) - password -- password for database user (default: see postgres docs) - host -- portgres hostname (default: see postgres docs) - port -- postgres port (default: see postgres docs) - initial_size -- integer size of connection pool (default: 1) - max_size -- integer max of connection pool (default: 10) - persist_connection -- keep connection open (default: True) - - Optional table-level keyword arguments: - extent -- manually specified data extent (comma delimited string, default: None) - estimate_extent -- boolean, direct PostGIS to use the faster, less accurate `estimate_extent` over `extent` (default: False) - extent_from_subquery -- boolean, direct Mapnik to query Postgis for the extent of the raw 'table' value (default: uses 'geometry_table') - geometry_table -- specify geometry table to use to look up metadata (default: automatically parsed from 'table' value) - geometry_field -- specify geometry field to use (default: first entry in geometry_columns) - srid -- specify srid to use (default: auto-detected from geometry_field) - row_limit -- integer limit of rows to return (default: 0) - cursor_size -- integer size of binary cursor to use (default: 0, no binary cursor is used) - - >>> from mapnik import PostGIS, Layer - >>> params = dict(dbname=env['MAPNIK_NAME'],table='osm',user='postgres',password='gis') - >>> params['estimate_extent'] = False - >>> params['extent'] = '-20037508,-19929239,20037508,19929239' - >>> postgis = PostGIS(**params) - >>> lyr = Layer('PostGIS Layer') - >>> lyr.datasource = postgis - - """ - keywords['type'] = 'postgis' - return CreateDatasource(keywords) - -def PgRaster(**keywords): - """Create a PgRaster Datasource. - - Required keyword arguments: - dbname -- database name to connect to - table -- table name or subselect query - - *Note: if using subselects for the 'table' value consider also - passing the 'raster_field' and 'srid' and 'extent_from_subquery' - options and/or specifying the 'raster_table' option. 
- - Optional db connection keyword arguments: - user -- database user to connect as (default: see postgres docs) - password -- password for database user (default: see postgres docs) - host -- portgres hostname (default: see postgres docs) - port -- postgres port (default: see postgres docs) - initial_size -- integer size of connection pool (default: 1) - max_size -- integer max of connection pool (default: 10) - persist_connection -- keep connection open (default: True) - - Optional table-level keyword arguments: - extent -- manually specified data extent (comma delimited string, default: None) - estimate_extent -- boolean, direct PostGIS to use the faster, less accurate `estimate_extent` over `extent` (default: False) - extent_from_subquery -- boolean, direct Mapnik to query Postgis for the extent of the raw 'table' value (default: uses 'geometry_table') - raster_table -- specify geometry table to use to look up metadata (default: automatically parsed from 'table' value) - raster_field -- specify geometry field to use (default: first entry in raster_columns) - srid -- specify srid to use (default: auto-detected from geometry_field) - row_limit -- integer limit of rows to return (default: 0) - cursor_size -- integer size of binary cursor to use (default: 0, no binary cursor is used) - use_overviews -- boolean, use overviews when available (default: false) - prescale_rasters -- boolean, scale rasters on the db side (default: false) - clip_rasters -- boolean, clip rasters on the db side (default: false) - band -- integer, if non-zero interprets the given band (1-based offset) as a data raster (default: 0) - - >>> from mapnik import PgRaster, Layer - >>> params = dict(dbname='mapnik',table='osm',user='postgres',password='gis') - >>> params['estimate_extent'] = False - >>> params['extent'] = '-20037508,-19929239,20037508,19929239' - >>> pgraster = PgRaster(**params) - >>> lyr = Layer('PgRaster Layer') - >>> lyr.datasource = pgraster - - """ - keywords['type'] = 'pgraster' - return CreateDatasource(keywords) - -def Raster(**keywords): - """Create a Raster (Tiff) Datasource. - - Required keyword arguments: - file -- path to stripped or tiled tiff - lox -- lowest (min) x/longitude of tiff extent - loy -- lowest (min) y/latitude of tiff extent - hix -- highest (max) x/longitude of tiff extent - hiy -- highest (max) y/latitude of tiff extent - - Hint: lox,loy,hix,hiy make a Mapnik Box2d - - Optional keyword arguments: - base -- path prefix (default None) - multi -- whether the image is in tiles on disk (default False) - - Multi-tiled keyword arguments: - x_width -- virtual image number of tiles in X direction (required) - y_width -- virtual image number of tiles in Y direction (required) - tile_size -- if an image is in tiles, how large are the tiles (default 256) - tile_stride -- if an image is in tiles, what's the increment between rows/cols (default 1) - - >>> from mapnik import Raster, Layer - >>> raster = Raster(base='/home/mapnik/data',file='elevation.tif',lox=-122.8,loy=48.5,hix=-122.7,hiy=48.6) - >>> lyr = Layer('Tiff Layer') - >>> lyr.datasource = raster - - """ - keywords['type'] = 'raster' - return CreateDatasource(keywords) - -def Gdal(**keywords): - """Create a GDAL Raster Datasource. - - Required keyword arguments: - file -- path to GDAL supported dataset - - Optional keyword arguments: - base -- path prefix (default None) - shared -- boolean, open GdalDataset in shared mode (default: False) - bbox -- tuple (minx, miny, maxx, maxy). If specified, overrides the bbox detected by GDAL. 
- - >>> from mapnik import Gdal, Layer - >>> dataset = Gdal(base='/home/mapnik/data',file='elevation.tif') - >>> lyr = Layer('GDAL Layer from TIFF file') - >>> lyr.datasource = dataset - - """ - keywords['type'] = 'gdal' - if 'bbox' in keywords: - if isinstance(keywords['bbox'], (tuple, list)): - keywords['bbox'] = ','.join([str(item) for item in keywords['bbox']]) - return CreateDatasource(keywords) - -def Occi(**keywords): - """Create a Oracle Spatial (10g) Vector Datasource. - - Required keyword arguments: - user -- database user to connect as - password -- password for database user - host -- oracle host to connect to (does not refer to SID in tsnames.ora) - table -- table name or subselect query - - Optional keyword arguments: - initial_size -- integer size of connection pool (default 1) - max_size -- integer max of connection pool (default 10) - extent -- manually specified data extent (comma delimited string, default None) - estimate_extent -- boolean, direct Oracle to use the faster, less accurate estimate_extent() over extent() (default False) - encoding -- file encoding (default 'utf-8') - geometry_field -- specify geometry field (default 'GEOLOC') - use_spatial_index -- boolean, force the use of the spatial index (default True) - - >>> from mapnik import Occi, Layer - >>> params = dict(host='myoracle',user='scott',password='tiger',table='test') - >>> params['estimate_extent'] = False - >>> params['extent'] = '-20037508,-19929239,20037508,19929239' - >>> oracle = Occi(**params) - >>> lyr = Layer('Oracle Spatial Layer') - >>> lyr.datasource = oracle - """ - keywords['type'] = 'occi' - return CreateDatasource(keywords) - -def Ogr(**keywords): - """Create a OGR Vector Datasource. - - Required keyword arguments: - file -- path to OGR supported dataset - layer -- name of layer to use within datasource (optional if layer_by_index or layer_by_sql is used) - - Optional keyword arguments: - layer_by_index -- choose layer by index number instead of by layer name or sql. - layer_by_sql -- choose layer by sql query number instead of by layer name or index. - base -- path prefix (default None) - encoding -- file encoding (default 'utf-8') - - >>> from mapnik import Ogr, Layer - >>> datasource = Ogr(base='/home/mapnik/data',file='rivers.geojson',layer='OGRGeoJSON') - >>> lyr = Layer('OGR Layer from GeoJSON file') - >>> lyr.datasource = datasource - - """ - keywords['type'] = 'ogr' - return CreateDatasource(keywords) - -def SQLite(**keywords): - """Create a SQLite Datasource. 
- - Required keyword arguments: - file -- path to SQLite database file - table -- table name or subselect query - - Optional keyword arguments: - base -- path prefix (default None) - encoding -- file encoding (default 'utf-8') - extent -- manually specified data extent (comma delimited string, default None) - metadata -- name of auxillary table containing record for table with xmin, ymin, xmax, ymax, and f_table_name - geometry_field -- name of geometry field (default 'the_geom') - key_field -- name of primary key field (default 'OGC_FID') - row_offset -- specify a custom integer row offset (default 0) - row_limit -- specify a custom integer row limit (default 0) - wkb_format -- specify a wkb type of 'spatialite' (default None) - use_spatial_index -- boolean, instruct sqlite plugin to use Rtree spatial index (default True) - - >>> from mapnik import SQLite, Layer - >>> sqlite = SQLite(base='/home/mapnik/data',file='osm.db',table='osm',extent='-20037508,-19929239,20037508,19929239') - >>> lyr = Layer('SQLite Layer') - >>> lyr.datasource = sqlite - - """ - keywords['type'] = 'sqlite' - return CreateDatasource(keywords) - -def Rasterlite(**keywords): - """Create a Rasterlite Datasource. - - Required keyword arguments: - file -- path to Rasterlite database file - table -- table name or subselect query - - Optional keyword arguments: - base -- path prefix (default None) - extent -- manually specified data extent (comma delimited string, default None) - - >>> from mapnik import Rasterlite, Layer - >>> rasterlite = Rasterlite(base='/home/mapnik/data',file='osm.db',table='osm',extent='-20037508,-19929239,20037508,19929239') - >>> lyr = Layer('Rasterlite Layer') - >>> lyr.datasource = rasterlite - - """ - keywords['type'] = 'rasterlite' - return CreateDatasource(keywords) - -def Osm(**keywords): - """Create a Osm Datasource. - - Required keyword arguments: - file -- path to OSM file - - Optional keyword arguments: - encoding -- file encoding (default 'utf-8') - url -- url to fetch data (default None) - bbox -- data bounding box for fetching data (default None) - - >>> from mapnik import Osm, Layer - >>> datasource = Osm(file='test.osm') - >>> lyr = Layer('Osm Layer') - >>> lyr.datasource = datasource - - """ - # note: parser only supports libxml2 so not exposing option - # parser -- xml parser to use (default libxml2) - keywords['type'] = 'osm' - return CreateDatasource(keywords) - -def Python(**keywords): - """Create a Python Datasource. - - >>> from mapnik import Python, PythonDatasource - >>> datasource = Python('PythonDataSource') - >>> lyr = Layer('Python datasource') - >>> lyr.datasource = datasource - """ - keywords['type'] = 'python' - return CreateDatasource(keywords) - -def MemoryDatasource(**keywords): - """Create a Memory Datasource. - - Optional keyword arguments: - (TODO) - """ - params = Parameters() - params.append(Parameter('type','memory')) - return MemoryDatasourceBase(params) - -class PythonDatasource(object): - """A base class for a Python data source. 
- - Optional arguments: - envelope -- a mapnik.Box2d (minx, miny, maxx, maxy) envelope of the data source, default (-180,-90,180,90) - geometry_type -- one of the DataGeometryType enumeration values, default Point - data_type -- one of the DataType enumerations, default Vector - """ - def __init__(self, envelope=None, geometry_type=None, data_type=None): - self.envelope = envelope or Box2d(-180, -90, 180, 90) - self.geometry_type = geometry_type or DataGeometryType.Point - self.data_type = data_type or DataType.Vector - - def features(self, query): - """Return an iterable which yields instances of Feature for features within the passed query. - - Required arguments: - query -- a Query instance specifying the region for which features should be returned - """ - return None - - def features_at_point(self, point): - """Rarely uses. Return an iterable which yields instances of Feature for the specified point.""" - return None - - @classmethod - def wkb_features(cls, keys, features): - """A convenience function to wrap an iterator yielding pairs of WKB format geometry and dictionaries of - key-value pairs into mapnik features. Return this from PythonDatasource.features() passing it a sequence of keys - to appear in the output and an iterator yielding features. - - For example. One might have a features() method in a derived class like the following: - - def features(self, query): - # ... create WKB features feat1 and feat2 - - return mapnik.PythonDatasource.wkb_features( - keys = ( 'name', 'author' ), - features = [ - (feat1, { 'name': 'feat1', 'author': 'alice' }), - (feat2, { 'name': 'feat2', 'author': 'bob' }), - ] - ) - - """ - ctx = Context() - [ctx.push(x) for x in keys] - - def make_it(feat, idx): - f = Feature(ctx, idx) - geom, attrs = feat - f.add_geometries_from_wkb(geom) - for k, v in attrs.iteritems(): - f[k] = v - return f - - return itertools.imap(make_it, features, itertools.count(1)) - - @classmethod - def wkt_features(cls, keys, features): - """A convenience function to wrap an iterator yielding pairs of WKT format geometry and dictionaries of - key-value pairs into mapnik features. Return this from PythonDatasource.features() passing it a sequence of keys - to appear in the output and an iterator yielding features. - - For example. One might have a features() method in a derived class like the following: - - def features(self, query): - # ... create WKT features feat1 and feat2 - - return mapnik.PythonDatasource.wkt_features( - keys = ( 'name', 'author' ), - features = [ - (feat1, { 'name': 'feat1', 'author': 'alice' }), - (feat2, { 'name': 'feat2', 'author': 'bob' }), - ] - ) - - """ - ctx = Context() - [ctx.push(x) for x in keys] - - def make_it(feat, idx): - f = Feature(ctx, idx) - geom, attrs = feat - f.add_geometries_from_wkt(geom) - for k, v in attrs.iteritems(): - f[k] = v - return f - - return itertools.imap(make_it, features, itertools.count(1)) - -class _TextSymbolizer(TextSymbolizer,_injector): - @property - def name(self): - if isinstance(self.properties.format_tree, FormattingText): - return self.properties.format_tree.text - else: - # There is no single expression which could be returned as name - raise RuntimeError("TextSymbolizer uses complex formatting features, but old compatibility interface is used to access it. 
Use self.properties.format_tree instead.") - - @name.setter - def name(self, name): - self.properties.format_tree = FormattingText(name) - - @property - def text_size(self): - return self.format.text_size - - @text_size.setter - def text_size(self, text_size): - self.format.text_size = text_size - - @property - def face_name(self): - return self.format.face_name - - @face_name.setter - def face_name(self, face_name): - self.format.face_name = face_name - - - @property - def fontset(self): - return self.format.fontset - - @fontset.setter - def fontset(self, fontset): - self.format.fontset = fontset - - - @property - def character_spacing(self): - return self.format.character_spacing - - @character_spacing.setter - def character_spacing(self, character_spacing): - self.format.character_spacing = character_spacing - - - @property - def line_spacing(self): - return self.format.line_spacing - - @line_spacing.setter - def line_spacing(self, line_spacing): - self.format.line_spacing = line_spacing - - - @property - def text_opacity(self): - return self.format.text_opacity - - @text_opacity.setter - def text_opacity(self, text_opacity): - self.format.text_opacity = text_opacity - - - @property - def wrap_before(self): - return self.format.wrap_before - - @wrap_before.setter - def wrap_before(self, wrap_before): - self.format.wrap_before = wrap_before - - - @property - def text_transform(self): - return self.format.text_transform - - @text_transform.setter - def text_transform(self, text_transform): - self.format.text_transform = text_transform - - - @property - def fill(self): - return self.format.fill - - @fill.setter - def fill(self, fill): - self.format.fill = fill - - - @property - def halo_fill(self): - return self.format.halo_fill - - @halo_fill.setter - def halo_fill(self, halo_fill): - self.format.halo_fill = halo_fill - - - - @property - def halo_radius(self): - return self.format.halo_radius - - @halo_radius.setter - def halo_radius(self, halo_radius): - self.format.halo_radius = halo_radius - - - @property - def label_placement(self): - return self.properties.label_placement - - @label_placement.setter - def label_placement(self, label_placement): - self.properties.label_placement = label_placement - - - - @property - def horizontal_alignment(self): - return self.properties.horizontal_alignment - - @horizontal_alignment.setter - def horizontal_alignment(self, horizontal_alignment): - self.properties.horizontal_alignment = horizontal_alignment - - - - @property - def justify_alignment(self): - return self.properties.justify_alignment - - @justify_alignment.setter - def justify_alignment(self, justify_alignment): - self.properties.justify_alignment = justify_alignment - - - - @property - def vertical_alignment(self): - return self.properties.vertical_alignment - - @vertical_alignment.setter - def vertical_alignment(self, vertical_alignment): - self.properties.vertical_alignment = vertical_alignment - - - - @property - def orientation(self): - return self.properties.orientation - - @orientation.setter - def orientation(self, orientation): - self.properties.orientation = orientation - - - - @property - def displacement(self): - return self.properties.displacement - - @displacement.setter - def displacement(self, displacement): - self.properties.displacement = displacement - - - - @property - def label_spacing(self): - return self.properties.label_spacing - - @label_spacing.setter - def label_spacing(self, label_spacing): - self.properties.label_spacing = label_spacing - - - - @property - 
def label_position_tolerance(self): - return self.properties.label_position_tolerance - - @label_position_tolerance.setter - def label_position_tolerance(self, label_position_tolerance): - self.properties.label_position_tolerance = label_position_tolerance - - - - @property - def avoid_edges(self): - return self.properties.avoid_edges - - @avoid_edges.setter - def avoid_edges(self, avoid_edges): - self.properties.avoid_edges = avoid_edges - - - - @property - def minimum_distance(self): - return self.properties.minimum_distance - - @minimum_distance.setter - def minimum_distance(self, minimum_distance): - self.properties.minimum_distance = minimum_distance - - - - @property - def minimum_padding(self): - return self.properties.minimum_padding - - @minimum_padding.setter - def minimum_padding(self, minimum_padding): - self.properties.minimum_padding = minimum_padding - - - - @property - def minimum_path_length(self): - return self.properties.minimum_path_length - - @minimum_path_length.setter - def minimum_path_length(self, minimum_path_length): - self.properties.minimum_path_length = minimum_path_length - - - - @property - def maximum_angle_char_delta(self): - return self.properties.maximum_angle_char_delta - - @maximum_angle_char_delta.setter - def maximum_angle_char_delta(self, maximum_angle_char_delta): - self.properties.maximum_angle_char_delta = maximum_angle_char_delta - - - @property - def allow_overlap(self): - return self.properties.allow_overlap - - @allow_overlap.setter - def allow_overlap(self, allow_overlap): - self.properties.allow_overlap = allow_overlap - - - - @property - def text_ratio(self): - return self.properties.text_ratio - - @text_ratio.setter - def text_ratio(self, text_ratio): - self.properties.text_ratio = text_ratio - - - - @property - def wrap_width(self): - return self.properties.wrap_width - - @wrap_width.setter - def wrap_width(self, wrap_width): - self.properties.wrap_width = wrap_width - - -def mapnik_version_from_string(version_string): - """Return the Mapnik version from a string.""" - n = version_string.split('.') - return (int(n[0]) * 100000) + (int(n[1]) * 100) + (int(n[2])); - -def register_plugins(path=None): - """Register plugins located by specified path""" - if not path: - if os.environ.has_key('MAPNIK_INPUT_PLUGINS_DIRECTORY'): - path = os.environ.get('MAPNIK_INPUT_PLUGINS_DIRECTORY') - else: - from paths import inputpluginspath - path = inputpluginspath - DatasourceCache.register_datasources(path) - -def register_fonts(path=None,valid_extensions=['.ttf','.otf','.ttc','.pfa','.pfb','.ttc','.dfont','.woff']): - """Recursively register fonts using path argument as base directory""" - if not path: - if os.environ.has_key('MAPNIK_FONT_DIRECTORY'): - path = os.environ.get('MAPNIK_FONT_DIRECTORY') - else: - from paths import fontscollectionpath - path = fontscollectionpath - for dirpath, _, filenames in os.walk(path): - for filename in filenames: - if os.path.splitext(filename.lower())[1] in valid_extensions: - FontEngine.instance().register_font(os.path.join(dirpath, filename)) - -# auto-register known plugins and fonts -register_plugins() -register_fonts() diff --git a/mapnik/printing.py b/mapnik/printing.py deleted file mode 100644 index e61f7c0ab..000000000 --- a/mapnik/printing.py +++ /dev/null @@ -1,1027 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Mapnik classes to assist in creating printable maps - -basic usage is along the lines of - -import mapnik - -page = mapnik.printing.PDFPrinter() -m = mapnik.Map(100,100) -mapnik.load_map(m, 
"my_xml_map_description", True) -m.zoom_all() -page.render_map(m,"my_output_file.pdf") - -see the documentation of mapnik.printing.PDFPrinter() for options - -""" -from __future__ import absolute_import - -from . import render, Map, Box2d, Layer, Feature, Projection, Coord, Style, Geometry -import math -import os -import tempfile - -try: - import cairo - HAS_PYCAIRO_MODULE = True -except ImportError: - HAS_PYCAIRO_MODULE = False - -try: - import pangocairo - import pango - HAS_PANGOCAIRO_MODULE = True -except ImportError: - HAS_PANGOCAIRO_MODULE = False - -try: - import pyPdf - HAS_PYPDF = True -except ImportError: - HAS_PYPDF = False - -class centering: - """Style of centering to use with the map, the default is constrained - - none: map will be placed flush with the margin/box in the top left corner - constrained: map will be centered on the most constrained axis (for a portrait page - and a square map this will be horizontally) - unconstrained: map will be centered on the unconstrained axis - vertical: - horizontal: - both: - """ - none=0 - constrained=1 - unconstrained=2 - vertical=3 - horizontal=4 - both=5 - -"""Some predefined page sizes custom sizes can also be passed -a tuple of the page width and height in meters""" -pagesizes = { - "a0": (0.841000,1.189000), - "a0l": (1.189000,0.841000), - "b0": (1.000000,1.414000), - "b0l": (1.414000,1.000000), - "c0": (0.917000,1.297000), - "c0l": (1.297000,0.917000), - "a1": (0.594000,0.841000), - "a1l": (0.841000,0.594000), - "b1": (0.707000,1.000000), - "b1l": (1.000000,0.707000), - "c1": (0.648000,0.917000), - "c1l": (0.917000,0.648000), - "a2": (0.420000,0.594000), - "a2l": (0.594000,0.420000), - "b2": (0.500000,0.707000), - "b2l": (0.707000,0.500000), - "c2": (0.458000,0.648000), - "c2l": (0.648000,0.458000), - "a3": (0.297000,0.420000), - "a3l": (0.420000,0.297000), - "b3": (0.353000,0.500000), - "b3l": (0.500000,0.353000), - "c3": (0.324000,0.458000), - "c3l": (0.458000,0.324000), - "a4": (0.210000,0.297000), - "a4l": (0.297000,0.210000), - "b4": (0.250000,0.353000), - "b4l": (0.353000,0.250000), - "c4": (0.229000,0.324000), - "c4l": (0.324000,0.229000), - "a5": (0.148000,0.210000), - "a5l": (0.210000,0.148000), - "b5": (0.176000,0.250000), - "b5l": (0.250000,0.176000), - "c5": (0.162000,0.229000), - "c5l": (0.229000,0.162000), - "a6": (0.105000,0.148000), - "a6l": (0.148000,0.105000), - "b6": (0.125000,0.176000), - "b6l": (0.176000,0.125000), - "c6": (0.114000,0.162000), - "c6l": (0.162000,0.114000), - "a7": (0.074000,0.105000), - "a7l": (0.105000,0.074000), - "b7": (0.088000,0.125000), - "b7l": (0.125000,0.088000), - "c7": (0.081000,0.114000), - "c7l": (0.114000,0.081000), - "a8": (0.052000,0.074000), - "a8l": (0.074000,0.052000), - "b8": (0.062000,0.088000), - "b8l": (0.088000,0.062000), - "c8": (0.057000,0.081000), - "c8l": (0.081000,0.057000), - "a9": (0.037000,0.052000), - "a9l": (0.052000,0.037000), - "b9": (0.044000,0.062000), - "b9l": (0.062000,0.044000), - "c9": (0.040000,0.057000), - "c9l": (0.057000,0.040000), - "a10": (0.026000,0.037000), - "a10l": (0.037000,0.026000), - "b10": (0.031000,0.044000), - "b10l": (0.044000,0.031000), - "c10": (0.028000,0.040000), - "c10l": (0.040000,0.028000), - "letter": (0.216,0.279), - "letterl": (0.279,0.216), - "legal": (0.216,0.356), - "legall": (0.356,0.216), -} -"""size of a pt in meters""" -pt_size=0.0254/72.0 - -def m2pt(x): - """convert distance from meters to points""" - return x/pt_size - -def pt2m(x): - """convert distance from points to meters""" - return x*pt_size - -def m2in(x): 
- """convert distance from meters to inches""" - return x/0.0254 - -def m2px(x,resolution): - """convert distance from meters to pixels at the given resolution in DPI/PPI""" - return m2in(x)*resolution - -class resolutions: - """some predefined resolutions in DPI""" - dpi72=72 - dpi150=150 - dpi300=300 - dpi600=600 - -def any_scale(scale): - """Scale helper function that allows any scale""" - return scale - -def sequence_scale(scale,scale_sequence): - """Default scale helper, this rounds scale to a 'sensible' value""" - factor = math.floor(math.log10(scale)) - norm = scale/(10**factor) - - for s in scale_sequence: - if norm <= s: - return s*10**factor - return scale_sequence[0]*10**(factor+1) - -def default_scale(scale): - """Default scale helper, this rounds scale to a 'sensible' value""" - return sequence_scale(scale, (1,1.25,1.5,1.75,2,2.5,3,4,5,6,7.5,8,9,10)) - -def deg_min_sec_scale(scale): - for x in (1.0/3600, - 2.0/3600, - 5.0/3600, - 10.0/3600, - 30.0/3600, - 1.0/60, - 2.0/60, - 5.0/60, - 10.0/60, - 30.0/60, - 1, - 2, - 5, - 10, - 30, - 60 - ): - if scale < x: - return x - else: - return x - -def format_deg_min_sec(value): - deg = math.floor(value) - min = math.floor((value-deg)/(1.0/60)) - sec = int((value - deg*1.0/60)/1.0/3600) - return "%d°%d'%d\"" % (deg,min,sec) - -def round_grid_generator(first,last,step): - val = (math.floor(first / step) + 1) * step - yield val - while val < last: - val += step - yield val - - -def convert_pdf_pages_to_layers(filename,output_name=None,layer_names=(),reverse_all_but_last=True): - """ - opens the given multipage PDF and converts each page to be a layer in a single page PDF - layer_names should be a sequence of the user visible names of the layers, if not given - or if shorter than num pages generic names will be given to the unnamed layers - - if output_name is not provided a temporary file will be used for the conversion which - will then be copied back over the source file. 
- - requires pyPdf >= 1.13 to be available""" - - - if not HAS_PYPDF: - raise Exception("pyPdf Not available") - - infile = file(filename, 'rb') - if output_name: - outfile = file(output_name, 'wb') - else: - (outfd,outfilename) = tempfile.mkstemp(dir=os.path.dirname(filename)) - outfile = os.fdopen(outfd,'wb') - - i = pyPdf.PdfFileReader(infile) - o = pyPdf.PdfFileWriter() - - template_page_size = i.pages[0].mediaBox - op = o.addBlankPage(width=template_page_size.getWidth(),height=template_page_size.getHeight()) - - contentkey = pyPdf.generic.NameObject('/Contents') - resourcekey = pyPdf.generic.NameObject('/Resources') - propertieskey = pyPdf.generic.NameObject('/Properties') - op[contentkey] = pyPdf.generic.ArrayObject() - op[resourcekey] = pyPdf.generic.DictionaryObject() - properties = pyPdf.generic.DictionaryObject() - ocgs = pyPdf.generic.ArrayObject() - - for (i, p) in enumerate(i.pages): - # first start an OCG for the layer - ocgname = pyPdf.generic.NameObject('/oc%d' % i) - ocgstart = pyPdf.generic.DecodedStreamObject() - ocgstart._data = "/OC %s BDC\n" % ocgname - ocgend = pyPdf.generic.DecodedStreamObject() - ocgend._data = "EMC\n" - if isinstance(p['/Contents'],pyPdf.generic.ArrayObject): - p[pyPdf.generic.NameObject('/Contents')].insert(0,ocgstart) - p[pyPdf.generic.NameObject('/Contents')].append(ocgend) - else: - p[pyPdf.generic.NameObject('/Contents')] = pyPdf.generic.ArrayObject((ocgstart,p['/Contents'],ocgend)) - - op.mergePage(p) - - ocg = pyPdf.generic.DictionaryObject() - ocg[pyPdf.generic.NameObject('/Type')] = pyPdf.generic.NameObject('/OCG') - if len(layer_names) > i: - ocg[pyPdf.generic.NameObject('/Name')] = pyPdf.generic.TextStringObject(layer_names[i]) - else: - ocg[pyPdf.generic.NameObject('/Name')] = pyPdf.generic.TextStringObject('Layer %d' % (i+1)) - indirect_ocg = o._addObject(ocg) - properties[ocgname] = indirect_ocg - ocgs.append(indirect_ocg) - - op[resourcekey][propertieskey] = o._addObject(properties) - - ocproperties = pyPdf.generic.DictionaryObject() - ocproperties[pyPdf.generic.NameObject('/OCGs')] = ocgs - defaultview = pyPdf.generic.DictionaryObject() - defaultview[pyPdf.generic.NameObject('/Name')] = pyPdf.generic.TextStringObject('Default') - defaultview[pyPdf.generic.NameObject('/BaseState ')] = pyPdf.generic.NameObject('/ON ') - defaultview[pyPdf.generic.NameObject('/ON')] = ocgs - if reverse_all_but_last: - defaultview[pyPdf.generic.NameObject('/Order')] = pyPdf.generic.ArrayObject(reversed(ocgs[:-1])) - defaultview[pyPdf.generic.NameObject('/Order')].append(ocgs[-1]) - else: - defaultview[pyPdf.generic.NameObject('/Order')] = pyPdf.generic.ArrayObject(reversed(ocgs)) - defaultview[pyPdf.generic.NameObject('/OFF')] = pyPdf.generic.ArrayObject() - - ocproperties[pyPdf.generic.NameObject('/D')] = o._addObject(defaultview) - - o._root.getObject()[pyPdf.generic.NameObject('/OCProperties')] = o._addObject(ocproperties) - - o.write(outfile) - - outfile.close() - infile.close() - - if not output_name: - os.rename(outfilename, filename) - -class PDFPrinter: - """Main class for creating PDF print outs, basically contruct an instance - with appropriate options and then call render_map with your mapnik map - """ - def __init__(self, - pagesize=pagesizes["a4"], - margin=0.005, - box=None, - percent_box=None, - scale=default_scale, - resolution=resolutions.dpi72, - preserve_aspect=True, - centering=centering.constrained, - is_latlon=False, - use_ocg_layers=False): - """Creates a cairo surface and context to render a PDF with. 
- - pagesize: tuple of page size in meters, see predefined sizes in pagessizes dict (default a4) - margin: page margin in meters (default 0.01) - box: box within the page to render the map into (will not render over margin). This should be - a Mapnik Box2d object. Default is the full page within the margin - percent_box: as per box, but specified as a percent (0->1) of the full page size. If both box - and percent_box are specified percent_box will be used. - scale: scale helper to use when rounding the map scale. This should be a function that - takes a single float and returns a float which is at least as large as the value - passed in. This is a 1:x scale. - resolution: the resolution to render non vector elements at (in DPI), defaults to 72 DPI - preserve_aspect: whether to preserve map aspect ratio. This defaults to True and it - is recommended you do not change it unless you know what you are doing - scales and so on will not work if this is False. - centering: Centering rules for maps where the scale rounding has reduced the map size. - This should be a value from the centering class. The default is to center on the - maps constrained axis, typically this will be horizontal for portrait pages and - vertical for landscape pages. - is_latlon: Is the map in lat lon degrees. If true magic anti meridian logic is enabled - use_ocg_layers: Create OCG layers in the PDF, requires pyPdf >= 1.13 - """ - self._pagesize = pagesize - self._margin = margin - self._box = box - self._scale = scale - self._resolution = resolution - self._preserve_aspect = preserve_aspect - self._centering = centering - self._is_latlon = is_latlon - self._use_ocg_layers = use_ocg_layers - - self._s = None - self._layer_names = [] - self._filename = None - - self.map_box = None - self.scale = None - - # don't both to round the scale if they are not preserving the aspect ratio - if not preserve_aspect: - self._scale = any_scale - - if percent_box: - self._box = Box2d(percent_box[0]*pagesize[0],percent_box[1]*pagesize[1], - percent_box[2]*pagesize[0],percent_box[3]*pagesize[1]) - - if not HAS_PYCAIRO_MODULE: - raise Exception("PDF rendering only available when pycairo is available") - - self.font_name = "DejaVu Sans" - - def finish(self): - if self._s: - self._s.finish() - self._s = None - - if self._use_ocg_layers: - convert_pdf_pages_to_layers(self._filename,layer_names=self._layer_names + ["Legend and Information"],reverse_all_but_last=True) - - def add_geospatial_pdf_header(self,m,filename,epsg=None,wkt=None): - """ Postprocessing step to add geospatial PDF information to PDF file as per - PDF standard 1.7 extension level 3 (also in draft PDF v2 standard at time of writing) - - one of either the epsg code or wkt text for the projection must be provided - - Should be called *after* the page has had .finish() called""" - if HAS_PYPDF and (epsg or wkt): - infile=file(filename,'rb') - (outfd,outfilename) = tempfile.mkstemp(dir=os.path.dirname(filename)) - outfile = os.fdopen(outfd,'wb') - - i=pyPdf.PdfFileReader(infile) - o=pyPdf.PdfFileWriter() - - # preserve OCProperties at document root if we have one - if i.trailer['/Root'].has_key(pyPdf.generic.NameObject('/OCProperties')): - o._root.getObject()[pyPdf.generic.NameObject('/OCProperties')] = i.trailer['/Root'].getObject()[pyPdf.generic.NameObject('/OCProperties')] - - for p in i.pages: - gcs = pyPdf.generic.DictionaryObject() - gcs[pyPdf.generic.NameObject('/Type')]=pyPdf.generic.NameObject('/PROJCS') - if epsg: - 
gcs[pyPdf.generic.NameObject('/EPSG')]=pyPdf.generic.NumberObject(int(epsg)) - if wkt: - gcs[pyPdf.generic.NameObject('/WKT')]=pyPdf.generic.TextStringObject(wkt) - - measure = pyPdf.generic.DictionaryObject() - measure[pyPdf.generic.NameObject('/Type')]=pyPdf.generic.NameObject('/Measure') - measure[pyPdf.generic.NameObject('/Subtype')]=pyPdf.generic.NameObject('/GEO') - measure[pyPdf.generic.NameObject('/GCS')]=gcs - bounds=pyPdf.generic.ArrayObject() - for x in (0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0): - bounds.append(pyPdf.generic.FloatObject(str(x))) - measure[pyPdf.generic.NameObject('/Bounds')]=bounds - measure[pyPdf.generic.NameObject('/LPTS')]=bounds - gpts=pyPdf.generic.ArrayObject() - - proj=Projection(m.srs) - env=m.envelope() - for x in ((env.minx, env.miny), (env.minx, env.maxy), (env.maxx, env.maxy), (env.maxx, env.miny)): - latlon_corner=proj.inverse(Coord(*x)) - # these are in lat,lon order according to the standard - gpts.append(pyPdf.generic.FloatObject(str(latlon_corner.y))) - gpts.append(pyPdf.generic.FloatObject(str(latlon_corner.x))) - measure[pyPdf.generic.NameObject('/GPTS')]=gpts - - vp=pyPdf.generic.DictionaryObject() - vp[pyPdf.generic.NameObject('/Type')]=pyPdf.generic.NameObject('/Viewport') - bbox=pyPdf.generic.ArrayObject() - - for x in self.map_box: - bbox.append(pyPdf.generic.FloatObject(str(x))) - vp[pyPdf.generic.NameObject('/BBox')]=bbox - vp[pyPdf.generic.NameObject('/Measure')]=measure - - vpa = pyPdf.generic.ArrayObject() - vpa.append(vp) - p[pyPdf.generic.NameObject('/VP')]=vpa - o.addPage(p) - - o.write(outfile) - infile=None - outfile.close() - os.rename(outfilename,filename) - - - def get_context(self): - """allow access so that extra 'bits' can be rendered to the page directly""" - return cairo.Context(self._s) - - def get_width(self): - return self._pagesize[0] - - def get_height(self): - return self._pagesize[1] - - def get_margin(self): - return self._margin - - def write_text(self,ctx,text,box_width=None,size=10, fill_color=(0.0, 0.0, 0.0), alignment=None): - if HAS_PANGOCAIRO_MODULE: - (attr,t,accel) = pango.parse_markup(text) - pctx = pangocairo.CairoContext(ctx) - l = pctx.create_layout() - l.set_attributes(attr) - fd = pango.FontDescription("%s %d" % (self.font_name,size)) - l.set_font_description(fd) - if box_width: - l.set_width(int(box_width*pango.SCALE)) - if alignment: - l.set_alignment(alignment) - pctx.update_layout(l) - l.set_text(t) - pctx.set_source_rgb(*fill_color) - pctx.show_layout(l) - return l.get_pixel_extents()[0] - - else: - ctx.rel_move_to(0,size) - ctx.select_font_face(self.font_name, cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL) - ctx.set_font_size(size) - ctx.show_text(text) - ctx.rel_move_to(0,size) - return (0,0,len(text)*size,size) - - def _get_context(self): - if HAS_PANGOCAIRO_MODULE: - return - elif HAS_PYCAIRO_MODULE: - return cairo.Context(self._s) - return None - - def _get_render_area(self): - """return a bounding box with the area of the page we are allowed to render out map to - in page coordinates (i.e. 
meters) - """ - # take off our page margins - render_area = Box2d(self._margin,self._margin,self._pagesize[0]-self._margin,self._pagesize[1]-self._margin) - - #then if user specified a box to render get intersection with that - if self._box: - return render_area.intersect(self._box) - - return render_area - - def _get_render_area_size(self): - """Get the width and height (in meters) of the area we can render the map to, returned as a tuple""" - render_area = self._get_render_area() - return (render_area.width(),render_area.height()) - - def _is_h_contrained(self,m): - """Test if the map size is constrained on the horizontal or vertical axes""" - available_area = self._get_render_area_size() - map_aspect = m.envelope().width()/m.envelope().height() - page_aspect = available_area[0]/available_area[1] - - return map_aspect > page_aspect - - def _get_meta_info_corner(self,render_size,m): - """Get the corner (in page coordinates) of a possibly - sensible place to render metadata such as a legend or scale""" - (x,y) = self._get_render_corner(render_size,m) - if self._is_h_contrained(m): - y += render_size[1]+0.005 - x = self._margin - else: - x += render_size[0]+0.005 - y = self._margin - - return (x,y) - - def _get_render_corner(self,render_size,m): - """Get the corner of the box we should render our map into""" - available_area = self._get_render_area() - - x=available_area[0] - y=available_area[1] - - h_is_contrained = self._is_h_contrained(m) - - if (self._centering == centering.both or - self._centering == centering.horizontal or - (self._centering == centering.constrained and h_is_contrained) or - (self._centering == centering.unconstrained and not h_is_contrained)): - x+=(available_area.width()-render_size[0])/2 - - if (self._centering == centering.both or - self._centering == centering.vertical or - (self._centering == centering.constrained and not h_is_contrained) or - (self._centering == centering.unconstrained and h_is_contrained)): - y+=(available_area.height()-render_size[1])/2 - return (x,y) - - def _get_map_pixel_size(self, width_page_m, height_page_m): - """for a given map size in paper coordinates return a tuple of the map 'pixel' size we - should create at the defined resolution""" - return (int(m2px(width_page_m,self._resolution)), int(m2px(height_page_m,self._resolution))) - - def render_map(self,m, filename): - """Render the given map to filename""" - - # store this for later so we can post process the PDF - self._filename = filename - - # work out the best scale to render out map at given the available space - (eff_width,eff_height) = self._get_render_area_size() - map_aspect = m.envelope().width()/m.envelope().height() - page_aspect = eff_width/eff_height - - scalex=m.envelope().width()/eff_width - scaley=m.envelope().height()/eff_height - - scale=max(scalex,scaley) - - rounded_mapscale=self._scale(scale) - scalefactor = scale/rounded_mapscale - mapw=eff_width*scalefactor - maph=eff_height*scalefactor - if self._preserve_aspect: - if map_aspect > page_aspect: - maph=mapw*(1/map_aspect) - else: - mapw=maph*map_aspect - - # set the map size so that raster elements render at the correct resolution - m.resize(*self._get_map_pixel_size(mapw,maph)) - # calculate the translation for the map starting point - (tx,ty) = self._get_render_corner((mapw,maph),m) - - # create our cairo surface and context and then render the map into it - self._s = cairo.PDFSurface(filename, m2pt(self._pagesize[0]),m2pt(self._pagesize[1])) - ctx=cairo.Context(self._s) - - for l in m.layers: - # extract 
the layer names for naming layers if we use OCG - self._layer_names.append(l.name) - - layer_map = Map(m.width,m.height,m.srs) - layer_map.layers.append(l) - for s in l.styles: - layer_map.append_style(s,m.find_style(s)) - layer_map.zoom_to_box(m.envelope()) - - def render_map(): - ctx.save() - ctx.translate(m2pt(tx),m2pt(ty)) - #cairo defaults to 72dpi - ctx.scale(72.0/self._resolution,72.0/self._resolution) - render(layer_map, ctx) - ctx.restore() - - # antimeridian - render_map() - if self._is_latlon and (m.envelope().minx < -180 or m.envelope().maxx > 180): - old_env = m.envelope() - if m.envelope().minx < -180: - delta = 360 - else: - delta = -360 - m.zoom_to_box(Box2d(old_env.minx+delta,old_env.miny,old_env.maxx+delta,old_env.maxy)) - render_map() - # restore the original env - m.zoom_to_box(old_env) - - if self._use_ocg_layers: - self._s.show_page() - - self.scale = rounded_mapscale - self.map_box = Box2d(tx,ty,tx+mapw,ty+maph) - - def render_on_map_lat_lon_grid(self,m,dec_degrees=True): - # don't render lat_lon grid if we are already in latlon - if self._is_latlon: - return - p2=Projection(m.srs) - - latlon_bounds = p2.inverse(m.envelope()) - if p2.inverse(m.envelope().center()).x > latlon_bounds.maxx: - latlon_bounds = Box2d(latlon_bounds.maxx,latlon_bounds.miny,latlon_bounds.minx+360,latlon_bounds.maxy) - - if p2.inverse(m.envelope().center()).y > latlon_bounds.maxy: - latlon_bounds = Box2d(latlon_bounds.miny,latlon_bounds.maxy,latlon_bounds.maxx,latlon_bounds.miny+360) - - latlon_mapwidth = latlon_bounds.width() - # render an extra 20% so we generally won't miss the ends of lines - latlon_buffer = 0.2*latlon_mapwidth - if dec_degrees: - latlon_divsize = default_scale(latlon_mapwidth/7.0) - else: - latlon_divsize = deg_min_sec_scale(latlon_mapwidth/7.0) - latlon_interpsize = latlon_mapwidth/m.width - - self._render_lat_lon_axis(m,p2,latlon_bounds.minx,latlon_bounds.maxx,latlon_bounds.miny,latlon_bounds.maxy,latlon_buffer,latlon_interpsize,latlon_divsize,dec_degrees,True) - self._render_lat_lon_axis(m,p2,latlon_bounds.miny,latlon_bounds.maxy,latlon_bounds.minx,latlon_bounds.maxx,latlon_buffer,latlon_interpsize,latlon_divsize,dec_degrees,False) - - def _render_lat_lon_axis(self,m,p2,x1,x2,y1,y2,latlon_buffer,latlon_interpsize,latlon_divsize,dec_degrees,is_x_axis): - ctx=cairo.Context(self._s) - ctx.set_source_rgb(1,0,0) - ctx.set_line_width(1) - latlon_labelsize = 6 - - ctx.translate(m2pt(self.map_box.minx),m2pt(self.map_box.miny)) - ctx.rectangle(0,0,m2pt(self.map_box.width()),m2pt(self.map_box.height())) - ctx.clip() - - ctx.select_font_face("DejaVu", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL) - ctx.set_font_size(latlon_labelsize) - - box_top = self.map_box.height() - if not is_x_axis: - ctx.translate(m2pt(self.map_box.width()/2),m2pt(self.map_box.height()/2)) - ctx.rotate(-math.pi/2) - ctx.translate(-m2pt(self.map_box.height()/2),-m2pt(self.map_box.width()/2)) - box_top = self.map_box.width() - - for xvalue in round_grid_generator(x1 - latlon_buffer,x2 + latlon_buffer,latlon_divsize): - yvalue = y1 - latlon_buffer - start_cross = None - end_cross = None - while yvalue < y2+latlon_buffer: - if is_x_axis: - start = m.view_transform().forward(p2.forward(Coord(xvalue,yvalue))) - else: - temp = m.view_transform().forward(p2.forward(Coord(yvalue,xvalue))) - start = Coord(m2pt(self.map_box.height())-temp.y,temp.x) - yvalue += latlon_interpsize - if is_x_axis: - end = m.view_transform().forward(p2.forward(Coord(xvalue,yvalue))) - else: - temp = 
m.view_transform().forward(p2.forward(Coord(yvalue,xvalue))) - end = Coord(m2pt(self.map_box.height())-temp.y,temp.x) - - ctx.move_to(start.x,start.y) - ctx.line_to(end.x,end.y) - ctx.stroke() - - if cmp(start.y, 0) != cmp(end.y,0): - start_cross = end.x - if cmp(start.y,m2pt(self.map_box.height())) != cmp(end.y, m2pt(self.map_box.height())): - end_cross = end.x - - if dec_degrees: - line_text = "%g" % (xvalue) - else: - line_text = format_deg_min_sec(xvalue) - if start_cross: - ctx.move_to(start_cross+2,latlon_labelsize) - ctx.show_text(line_text) - if end_cross: - ctx.move_to(end_cross+2,m2pt(box_top)-2) - ctx.show_text(line_text) - - def render_on_map_scale(self,m): - (div_size,page_div_size) = self._get_sensible_scalebar_size(m) - - first_value_x = (math.floor(m.envelope().minx / div_size) + 1) * div_size - first_value_x_percent = (first_value_x-m.envelope().minx)/m.envelope().width() - self._render_scale_axis(first_value_x,first_value_x_percent,self.map_box.minx,self.map_box.maxx,page_div_size,div_size,self.map_box.miny,self.map_box.maxy,True) - - first_value_y = (math.floor(m.envelope().miny / div_size) + 1) * div_size - first_value_y_percent = (first_value_y-m.envelope().miny)/m.envelope().height() - self._render_scale_axis(first_value_y,first_value_y_percent,self.map_box.miny,self.map_box.maxy,page_div_size,div_size,self.map_box.minx,self.map_box.maxx,False) - - if self._use_ocg_layers: - self._s.show_page() - self._layer_names.append("Coordinate Grid Overlay") - - def _get_sensible_scalebar_size(self,m,width=-1): - # aim for about 8 divisions across the map - # also make sure we can fit the bar with in page area width if specified - div_size = sequence_scale(m.envelope().width()/8, [1,2,5]) - page_div_size = self.map_box.width()*div_size/m.envelope().width() - while width > 0 and page_div_size > width: - div_size /=2 - page_div_size /= 2 - return (div_size,page_div_size) - - def _render_box(self,ctx,x,y,w,h,text=None,stroke_color=(0,0,0),fill_color=(0,0,0)): - ctx.set_line_width(1) - ctx.set_source_rgb(*fill_color) - ctx.rectangle(x,y,w,h) - ctx.fill() - - ctx.set_source_rgb(*stroke_color) - ctx.rectangle(x,y,w,h) - ctx.stroke() - - if text: - ctx.move_to(x+1,y) - self.write_text(ctx,text,fill_color=[1-z for z in fill_color],size=h-2) - - def _render_scale_axis(self,first,first_percent,start,end,page_div_size,div_size,boundary_start,boundary_end,is_x_axis): - prev = start - text = None - fill=(0,0,0) - border_size=8 - value = first_percent * (end-start) + start - label_value = first-div_size - if self._is_latlon and label_value < -180: - label_value += 360 - - ctx=cairo.Context(self._s) - - if not is_x_axis: - ctx.translate(m2pt(self.map_box.center().x),m2pt(self.map_box.center().y)) - ctx.rotate(-math.pi/2) - ctx.translate(-m2pt(self.map_box.center().y),-m2pt(self.map_box.center().x)) - - while value < end: - ctx.move_to(m2pt(value),m2pt(boundary_start)) - ctx.line_to(m2pt(value),m2pt(boundary_end)) - ctx.set_source_rgb(0.5,0.5,0.5) - ctx.set_line_width(1) - ctx.stroke() - - for bar in (m2pt(boundary_start)-border_size,m2pt(boundary_end)): - self._render_box(ctx,m2pt(prev),bar,m2pt(value-prev),border_size,text,fill_color=fill) - - prev = value - value+=page_div_size - fill = [1-z for z in fill] - label_value += div_size - if self._is_latlon and label_value > 180: - label_value -= 360 - text = "%d" % label_value - else: - for bar in (m2pt(boundary_start)-border_size,m2pt(boundary_end)): - self._render_box(ctx,m2pt(prev),bar,m2pt(end-prev),border_size,fill_color=fill) - - - def 
render_scale(self,m,ctx=None,width=0.05): - """ m: map to render scale for - ctx: A cairo context to render the scale to. If this is None (the default) then - automatically create a context and choose the best location for the scale bar. - width: Width of area available to render scale bar in (in m) - - will return the size of the rendered scale block in pts - """ - - (w,h) = (0,0) - - # don't render scale if we are lat lon - # dont report scale if we have warped the aspect ratio - if self._preserve_aspect and not self._is_latlon: - bar_size=8.0 - box_count=3 - if ctx is None: - ctx=cairo.Context(self._s) - (tx,ty) = self._get_meta_info_corner((self.map_box.width(),self.map_box.height()),m) - ctx.translate(tx,ty) - - (div_size,page_div_size) = self._get_sensible_scalebar_size(m, width/box_count) - - - div_unit = "m" - if div_size > 1000: - div_size /= 1000 - div_unit = "km" - - text = "0%s" % div_unit - ctx.save() - if width > 0: - ctx.translate(m2pt(width-box_count*page_div_size)/2,0) - for ii in range(box_count): - fill=(ii%2,)*3 - self._render_box(ctx, m2pt(ii*page_div_size), h, m2pt(page_div_size), bar_size, text, fill_color=fill) - fill = [1-z for z in fill] - text = "%g%s" % ((ii+1)*div_size,div_unit) - #else: - # self._render_box(ctx, m2pt(box_count*page_div_size), h, m2pt(page_div_size), bar_size, text, fill_color=(1,1,1), stroke_color=(1,1,1)) - w = (box_count)*page_div_size - h += bar_size - ctx.restore() - - if width > 0: - box_width=m2pt(width) - else: - box_width = None - - font_size=6 - ctx.move_to(0,h) - if HAS_PANGOCAIRO_MODULE: - alignment = pango.ALIGN_CENTER - else: - alignment = None - - text_ext=self.write_text(ctx,"Scale 1:%d" % self.scale,box_width=box_width,size=font_size, alignment=alignment) - h+=text_ext[3]+2 - - return (w,h) - - def render_legend(self,m, page_break=False, ctx=None, collumns=1,width=None, height=None, item_per_rule=False, attribution={}, legend_item_box_size=(0.015,0.0075)): - """ m: map to render legend for - ctx: A cairo context to render the legend to. If this is None (the default) then - automatically create a context and choose the best location for the legend. - width: Width of area available to render legend in (in m) - page_break: move to next page if legen over flows this one - collumns: number of collumns available in legend box - attribution: additional text that will be rendered in gray under the layer name. 
keyed by layer name - legend_item_box_size: two tuple with width and height of legend item box size in meters - - will return the size of the rendered block in pts - """ - - (w,h) = (0,0) - if self._s: - if ctx is None: - ctx=cairo.Context(self._s) - (tx,ty) = self._get_meta_info_corner((self.map_box.width(),self.map_box.height()),m) - ctx.translate(m2pt(tx),m2pt(ty)) - width = self._pagesize[0]-2*tx - height = self._pagesize[1]-self._margin-ty - - x=0 - y=0 - if width: - cwidth = width/collumns - w=m2pt(width) - else: - cwidth = None - current_collumn = 0 - - processed_layers = [] - for l in reversed(m.layers): - have_layer_header = False - added_styles={} - layer_title = l.name - if layer_title in processed_layers: - continue - processed_layers.append(layer_title) - - # check through the features to find which combinations of styles are active - # for each unique combination add a legend entry - for f in l.datasource.all_features(): - if f.num_geometries() > 0: - active_rules = [] - rule_text = "" - for s in l.styles: - st = m.find_style(s) - for r in st.rules: - # we need to do the scale test here as well so we don't - # add unused scale rules to the legend description - if ((not r.filter) or r.filter.evaluate(f) == '1') and \ - r.min_scale <= m.scale_denominator() and m.scale_denominator() < r.max_scale: - active_rules.append((s,r.name)) - if r.filter and str(r.filter) != "true": - if len(rule_text) > 0: - rule_text += " AND " - if r.name: - rule_text += r.name - else: - rule_text += str(r.filter) - active_rules = tuple(active_rules) - if added_styles.has_key(active_rules): - continue - - added_styles[active_rules] = (f,rule_text) - if not item_per_rule: - break - else: - added_styles[l] = (None,None) - - legend_items = added_styles.keys() - legend_items.sort() - for li in legend_items: - if True: - (f,rule_text) = added_styles[li] - - - legend_map_size = (int(m2pt(legend_item_box_size[0])),int(m2pt(legend_item_box_size[1]))) - lemap=Map(legend_map_size[0],legend_map_size[1],srs=m.srs) - if m.background: - lemap.background = m.background - # the buffer is needed to ensure that text labels that overflow the edge of the - # map still render for the legend - lemap.buffer_size=1000 - for s in l.styles: - sty=m.find_style(s) - lestyle = Style() - for r in sty.rules: - for sym in r.symbols: - try: - sym.avoid_edges=False - except: - print "**** Cant set avoid edges for rule", r.name - if r.min_scale <= m.scale_denominator() and m.scale_denominator() < r.max_scale: - lerule = r - lerule.min_scale = 0 - lerule.max_scale = float("inf") - lestyle.rules.append(lerule) - lemap.append_style(s,lestyle) - - ds = MemoryDatasource() - if f is None: - ds=l.datasource - layer_srs = l.srs - elif f.envelope().width() == 0: - ds.add_feature(Feature(f.id(),Geometry2d.from_wkt("POINT(0 0)"),**f.attributes)) - lemap.zoom_to_box(Box2d(-1,-1,1,1)) - layer_srs = m.srs - else: - ds.add_feature(f) - layer_srs = l.srs - - lelayer = Layer("LegendLayer",layer_srs) - lelayer.datasource = ds - for s in l.styles: - lelayer.styles.append(s) - lemap.layers.append(lelayer) - - if f is None or f.envelope().width() != 0: - lemap.zoom_all() - lemap.zoom(1.1) - - item_size = legend_map_size[1] - if not have_layer_header: - item_size += 8 - - if y+item_size > m2pt(height): - current_collumn += 1 - y=0 - if current_collumn >= collumns: - if page_break: - self._s.show_page() - x=0 - current_collumn = 0 - else: - break - - if not have_layer_header and item_per_rule: - ctx.move_to(x+m2pt(current_collumn*cwidth),y) - 
e=self.write_text(ctx, l.name, m2pt(cwidth), 8) - y+=e[3]+2 - have_layer_header = True - ctx.save() - ctx.translate(x+m2pt(current_collumn*cwidth),y) - #extra save around map render as it sets up a clip box and doesn't clear it - ctx.save() - render(lemap, ctx) - ctx.restore() - - ctx.rectangle(0,0,*legend_map_size) - ctx.set_source_rgb(0.5,0.5,0.5) - ctx.set_line_width(1) - ctx.stroke() - ctx.restore() - - ctx.move_to(x+legend_map_size[0]+m2pt(current_collumn*cwidth)+2,y) - legend_entry_size = legend_map_size[1] - legend_text_size = 0 - if not item_per_rule: - rule_text = layer_title - if rule_text: - e=self.write_text(ctx, rule_text, m2pt(cwidth-legend_item_box_size[0]-0.005), 6) - legend_text_size += e[3] - ctx.rel_move_to(0,e[3]) - if attribution.has_key(layer_title): - e=self.write_text(ctx, attribution[layer_title], m2pt(cwidth-legend_item_box_size[0]-0.005), 6, fill_color=(0.5,0.5,0.5)) - legend_text_size += e[3] - - if legend_text_size > legend_entry_size: - legend_entry_size=legend_text_size - - y+=legend_entry_size +2 - if y > h: - h = y - return (w,h) diff --git a/packaging/mapnik/__init__.py b/packaging/mapnik/__init__.py new file mode 100644 index 000000000..1fb21c7ea --- /dev/null +++ b/packaging/mapnik/__init__.py @@ -0,0 +1,406 @@ +# +# This file is part of Mapnik (c++ mapping toolkit) +# Copyright (C) 2024 Artem Pavlenko +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# + +"""Mapnik Python module. + +Python bindings to the Mapnik C++ shared library. + +Several things happen when you do: + + >>> import mapnik + + 1) Mapnik C++ objects are imported via the '__init__.py' from the '_mapnik.so' shared object + (_mapnik.pyd on win) which references libmapnik.so (linux), libmapnik.dylib (mac), or + mapnik.dll (win32). + + 2) The paths to the input plugins and font directories are imported from the 'paths.py' + file which was constructed and installed during SCons installation. + + 3) All available input plugins and TrueType fonts are automatically registered. + +""" + +import itertools +import os +import warnings + +def bootstrap_env(): + """ + If an optional settings file exists, inherit its + environment settings before loading the mapnik library. + + This feature is intended for customized packages of mapnik. + + The settings file should be a python file with an 'env' variable + that declares a dictionary of key:value pairs to push into the + global process environment, if not already set, like: + + env = {'ICU_DATA':'/usr/local/share/icu/'} + """ + if os.path.exists(os.path.join( + os.path.dirname(__file__), 'mapnik_settings.py')): + from .mapnik_settings import env + process_keys = os.environ.keys() + for key, value in env.items(): + if key not in process_keys: + os.environ[key] = value + +bootstrap_env() + +from ._mapnik import * + +def Shapefile(**keywords): + """Create a Shapefile Datasource. 
+ + Required keyword arguments: + file -- path to shapefile without extension + + Optional keyword arguments: + base -- path prefix (default None) + encoding -- file encoding (default 'utf-8') + + >>> from mapnik import Shapefile, Layer + >>> shp = Shapefile(base='/home/mapnik/data',file='world_borders') + >>> lyr = Layer('Shapefile Layer') + >>> lyr.datasource = shp + + """ + return CreateDatasource(type='shape', **keywords) + +def CSV(**keywords): + """Create a CSV Datasource. + + Required keyword arguments: + file -- path to csv + + Optional keyword arguments: + inline -- inline CSV string (if provided 'file' argument will be ignored and non-needed) + base -- path prefix (default None) + encoding -- file encoding (default 'utf-8') + row_limit -- integer limit of rows to return (default: 0) + strict -- throw an error if an invalid row is encountered + escape -- The escape character to use for parsing data + quote -- The quote character to use for parsing data + separator -- The separator character to use for parsing data + headers -- A comma separated list of header names that can be set to add headers to data that lacks them + filesize_max -- The maximum filesize in MB that will be accepted + + >>> from mapnik import CSV + >>> csv = CSV(file='test.csv') + + >>> from mapnik import CSV + >>> csv = CSV(inline='''wkt,Name\n"POINT (120.15 48.47)","Winthrop, WA"''') + + For more information see https://github.com/mapnik/mapnik/wiki/CSV-Plugin + + """ + return CreateDatasource(type='csv', **keywords) + + +def GeoJSON(**keywords): + """Create a GeoJSON Datasource. + + Required keyword arguments: + file -- path to json + + Optional keyword arguments: + encoding -- file encoding (default 'utf-8') + base -- path prefix (default None) + + >>> from mapnik import GeoJSON + >>> geojson = GeoJSON(file='test.json') + + """ + return CreateDatasource(type='geojson', **keywords) + + +def PostGIS(**keywords): + """Create a PostGIS Datasource. + + Required keyword arguments: + dbname -- database name to connect to + table -- table name or subselect query + + *Note: if using subselects for the 'table' value consider also + passing the 'geometry_field' and 'srid' and 'extent_from_subquery' + options and/or specifying the 'geometry_table' option. 
+ + Optional db connection keyword arguments: + user -- database user to connect as (default: see postgres docs) + password -- password for database user (default: see postgres docs) + host -- postgres hostname (default: see postgres docs) + port -- postgres port (default: see postgres docs) + initial_size -- integer size of connection pool (default: 1) + max_size -- integer max of connection pool (default: 10) + persist_connection -- keep connection open (default: True) + + Optional table-level keyword arguments: + extent -- manually specified data extent (comma delimited string, default: None) + estimate_extent -- boolean, direct PostGIS to use the faster, less accurate `estimate_extent` over `extent` (default: False) + extent_from_subquery -- boolean, direct Mapnik to query Postgis for the extent of the raw 'table' value (default: uses 'geometry_table') + geometry_table -- specify geometry table to use to look up metadata (default: automatically parsed from 'table' value) + geometry_field -- specify geometry field to use (default: first entry in geometry_columns) + srid -- specify srid to use (default: auto-detected from geometry_field) + row_limit -- integer limit of rows to return (default: 0) + cursor_size -- integer size of binary cursor to use (default: 0, no binary cursor is used) + + >>> from mapnik import PostGIS, Layer + >>> params = dict(dbname='mapnik',table='osm',user='postgres',password='gis') + >>> params['estimate_extent'] = False + >>> params['extent'] = '-20037508,-19929239,20037508,19929239' + >>> postgis = PostGIS(**params) + >>> lyr = Layer('PostGIS Layer') + >>> lyr.datasource = postgis + + """ + return CreateDatasource(type='postgis', **keywords) + + +def PgRaster(**keywords): + """Create a PgRaster Datasource. + + Required keyword arguments: + dbname -- database name to connect to + table -- table name or subselect query + + *Note: if using subselects for the 'table' value consider also + passing the 'raster_field' and 'srid' and 'extent_from_subquery' + options and/or specifying the 'raster_table' option.
+ + Optional db connection keyword arguments: + user -- database user to connect as (default: see postgres docs) + password -- password for database user (default: see postgres docs) + host -- postgres hostname (default: see postgres docs) + port -- postgres port (default: see postgres docs) + initial_size -- integer size of connection pool (default: 1) + max_size -- integer max of connection pool (default: 10) + persist_connection -- keep connection open (default: True) + + Optional table-level keyword arguments: + extent -- manually specified data extent (comma delimited string, default: None) + estimate_extent -- boolean, direct PostGIS to use the faster, less accurate `estimate_extent` over `extent` (default: False) + extent_from_subquery -- boolean, direct Mapnik to query Postgis for the extent of the raw 'table' value (default: uses 'geometry_table') + raster_table -- specify geometry table to use to look up metadata (default: automatically parsed from 'table' value) + raster_field -- specify geometry field to use (default: first entry in raster_columns) + srid -- specify srid to use (default: auto-detected from geometry_field) + row_limit -- integer limit of rows to return (default: 0) + cursor_size -- integer size of binary cursor to use (default: 0, no binary cursor is used) + use_overviews -- boolean, use overviews when available (default: false) + prescale_rasters -- boolean, scale rasters on the db side (default: false) + clip_rasters -- boolean, clip rasters on the db side (default: false) + band -- integer, if non-zero interprets the given band (1-based offset) as a data raster (default: 0) + + >>> from mapnik import PgRaster, Layer + >>> params = dict(dbname='mapnik',table='osm',user='postgres',password='gis') + >>> params['estimate_extent'] = False + >>> params['extent'] = '-20037508,-19929239,20037508,19929239' + >>> pgraster = PgRaster(**params) + >>> lyr = Layer('PgRaster Layer') + >>> lyr.datasource = pgraster + + """ + return CreateDatasource(type = 'pgraster', **keywords) + + +def Raster(**keywords): + """Create a Raster (Tiff) Datasource. + + Required keyword arguments: + file -- path to stripped or tiled tiff + lox -- lowest (min) x/longitude of tiff extent + loy -- lowest (min) y/latitude of tiff extent + hix -- highest (max) x/longitude of tiff extent + hiy -- highest (max) y/latitude of tiff extent + + Hint: lox,loy,hix,hiy make a Mapnik Box2d + + Optional keyword arguments: + base -- path prefix (default None) + multi -- whether the image is in tiles on disk (default False) + + Multi-tiled keyword arguments: + x_width -- virtual image number of tiles in X direction (required) + y_width -- virtual image number of tiles in Y direction (required) + tile_size -- if an image is in tiles, how large are the tiles (default 256) + tile_stride -- if an image is in tiles, what's the increment between rows/cols (default 1) + + >>> from mapnik import Raster, Layer + >>> raster = Raster(base='/home/mapnik/data',file='elevation.tif',lox=-122.8,loy=48.5,hix=-122.7,hiy=48.6) + >>> lyr = Layer('Tiff Layer') + >>> lyr.datasource = raster + + """ + return CreateDatasource(type='raster', **keywords) + + +def Gdal(**keywords): + """Create a GDAL Raster Datasource. + + Required keyword arguments: + file -- path to GDAL supported dataset + + Optional keyword arguments: + base -- path prefix (default None) + shared -- boolean, open GdalDataset in shared mode (default: False) + bbox -- tuple (minx, miny, maxx, maxy). If specified, overrides the bbox detected by GDAL. 
+ + >>> from mapnik import Gdal, Layer + >>> dataset = Gdal(base='/home/mapnik/data',file='elevation.tif') + >>> lyr = Layer('GDAL Layer from TIFF file') + >>> lyr.datasource = dataset + + """ + keywords['type'] = 'gdal' + if 'bbox' in keywords: + if isinstance(keywords['bbox'], (tuple, list)): + keywords['bbox'] = ','.join([str(item) + for item in keywords['bbox']]) + return CreateDatasource(**keywords) + + +def Occi(**keywords): + """Create a Oracle Spatial (10g) Vector Datasource. + + Required keyword arguments: + user -- database user to connect as + password -- password for database user + host -- oracle host to connect to (does not refer to SID in tsnames.ora) + table -- table name or subselect query + + Optional keyword arguments: + initial_size -- integer size of connection pool (default 1) + max_size -- integer max of connection pool (default 10) + extent -- manually specified data extent (comma delimited string, default None) + estimate_extent -- boolean, direct Oracle to use the faster, less accurate estimate_extent() over extent() (default False) + encoding -- file encoding (default 'utf-8') + geometry_field -- specify geometry field (default 'GEOLOC') + use_spatial_index -- boolean, force the use of the spatial index (default True) + + >>> from mapnik import Occi, Layer + >>> params = dict(host='myoracle',user='scott',password='tiger',table='test') + >>> params['estimate_extent'] = False + >>> params['extent'] = '-20037508,-19929239,20037508,19929239' + >>> oracle = Occi(**params) + >>> lyr = Layer('Oracle Spatial Layer') + >>> lyr.datasource = oracle + """ + keywords['type'] = 'occi' + return CreateDatasource(**keywords) + + +def Ogr(**keywords): + """Create a OGR Vector Datasource. + + Required keyword arguments: + file -- path to OGR supported dataset + layer -- name of layer to use within datasource (optional if layer_by_index or layer_by_sql is used) + + Optional keyword arguments: + layer_by_index -- choose layer by index number instead of by layer name or sql. + layer_by_sql -- choose layer by sql query number instead of by layer name or index. + base -- path prefix (default None) + encoding -- file encoding (default 'utf-8') + + >>> from mapnik import Ogr, Layer + >>> datasource = Ogr(base='/home/mapnik/data',file='rivers.geojson',layer='OGRGeoJSON') + >>> lyr = Layer('OGR Layer from GeoJSON file') + >>> lyr.datasource = datasource + + """ + keywords['type'] = 'ogr' + return CreateDatasource(**keywords) + + +def SQLite(**keywords): + """Create a SQLite Datasource. 
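As a quick illustration of the `bbox` handling in `Gdal()` shown above: the helper joins a tuple or list into the comma-separated string the plugin expects, so both spellings below produce the same datasource parameters (the file name is a placeholder):

```python
from mapnik import Gdal

d1 = Gdal(file='elevation.tif', bbox=(5.8, 45.8, 10.5, 47.8))   # tuple, joined for you
d2 = Gdal(file='elevation.tif', bbox='5.8,45.8,10.5,47.8')      # pre-joined string
```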
+ + Required keyword arguments: + file -- path to SQLite database file + table -- table name or subselect query + + Optional keyword arguments: + base -- path prefix (default None) + encoding -- file encoding (default 'utf-8') + extent -- manually specified data extent (comma delimited string, default None) + metadata -- name of auxiliary table containing record for table with xmin, ymin, xmax, ymax, and f_table_name + geometry_field -- name of geometry field (default 'the_geom') + key_field -- name of primary key field (default 'OGC_FID') + row_offset -- specify a custom integer row offset (default 0) + row_limit -- specify a custom integer row limit (default 0) + wkb_format -- specify a wkb type of 'spatialite' (default None) + use_spatial_index -- boolean, instruct sqlite plugin to use Rtree spatial index (default True) + + >>> from mapnik import SQLite, Layer + >>> sqlite = SQLite(base='/home/mapnik/data',file='osm.db',table='osm',extent='-20037508,-19929239,20037508,19929239') + >>> lyr = Layer('SQLite Layer') + >>> lyr.datasource = sqlite + + """ + keywords['type'] = 'sqlite' + return CreateDatasource(**keywords) + + +def Rasterlite(**keywords): + """Create a Rasterlite Datasource. + + Required keyword arguments: + file -- path to Rasterlite database file + table -- table name or subselect query + + Optional keyword arguments: + base -- path prefix (default None) + extent -- manually specified data extent (comma delimited string, default None) + + >>> from mapnik import Rasterlite, Layer + >>> rasterlite = Rasterlite(base='/home/mapnik/data',file='osm.db',table='osm',extent='-20037508,-19929239,20037508,19929239') + >>> lyr = Layer('Rasterlite Layer') + >>> lyr.datasource = rasterlite + + """ + keywords['type'] = 'rasterlite' + return CreateDatasource(**keywords) + +def register_plugins(path=None): + """Register plugins located by specified path""" + if not path: + if 'MAPNIK_INPUT_PLUGINS_DIRECTORY' in os.environ: + path = os.environ.get('MAPNIK_INPUT_PLUGINS_DIRECTORY') + else: + from .paths import inputpluginspath + path = inputpluginspath + DatasourceCache.register_datasources(path, False) + + +def register_fonts(path=None, valid_extensions=[ + '.ttf', '.otf', '.ttc', '.pfa', '.pfb', '.ttc', '.dfont', '.woff']): + """Recursively register fonts using path argument as base directory""" + if not path: + if 'MAPNIK_FONT_DIRECTORY' in os.environ: + path = os.environ.get('MAPNIK_FONT_DIRECTORY') + else: + from .paths import fontscollectionpath + path = fontscollectionpath + for dirpath, _, filenames in os.walk(path): + for filename in filenames: + if os.path.splitext(filename.lower())[1] in valid_extensions: + FontEngine.register_font(os.path.join(dirpath, filename)) + +# # auto-register known plugins and fonts +register_plugins() +register_fonts() diff --git a/mapnik/mapnik_settings.py b/packaging/mapnik/mapnik_settings.py similarity index 59% rename from mapnik/mapnik_settings.py rename to packaging/mapnik/mapnik_settings.py index 6c48cea38..29959ca0d 100644 --- a/mapnik/mapnik_settings.py +++ b/packaging/mapnik/mapnik_settings.py @@ -1,13 +1,14 @@ import os + mapnik_data_dir = os.path.dirname(os.path.realpath(__file__)) env = {} -icu_path = os.path.join(mapnik_data_dir, 'plugins', 'icu') +icu_path = os.path.join(mapnik_data_dir, 'share', 'icu') if os.path.isdir(icu_path): env['ICU_DATA'] = icu_path -gdal_path = os.path.join(mapnik_data_dir, 'plugins', 'gdal') +gdal_path = os.path.join(mapnik_data_dir, 'share', 'gdal') if os.path.isdir(gdal_path): env['GDAL_DATA'] = gdal_path 
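The two registration helpers defined just above can also be pointed at non-default locations, either by argument or through the environment variables they read. A small sketch with placeholder paths; the final line assumes the existing `DatasourceCache.plugin_names()` binding:

```python
import mapnik

# Explicit paths (placeholders) override the packaged defaults.
mapnik.register_plugins('/opt/mapnik/lib/mapnik/input')
mapnik.register_fonts('/usr/share/fonts/truetype/dejavu')

# Equivalent environment-driven setup, read when the module is imported:
#   MAPNIK_INPUT_PLUGINS_DIRECTORY=/opt/mapnik/lib/mapnik/input
#   MAPNIK_FONT_DIRECTORY=/usr/share/fonts/truetype/dejavu
print(list(mapnik.DatasourceCache.plugin_names()))
```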
-proj_path = os.path.join(mapnik_data_dir, 'plugins', 'proj') +proj_path = os.path.join(mapnik_data_dir, 'share', 'proj') if os.path.isdir(proj_path): env['PROJ_LIB'] = proj_path diff --git a/packaging/mapnik/printing/__init__.py b/packaging/mapnik/printing/__init__.py new file mode 100644 index 000000000..bebbc2de5 --- /dev/null +++ b/packaging/mapnik/printing/__init__.py @@ -0,0 +1,1387 @@ +# -*- coding: utf-8 -*- + +"""Mapnik classes to assist in creating printable maps.""" + +import logging +import math +from mapnik import Box2d, Coord, Geometry, Layer, Map, Projection, ProjTransform, Style, render +from mapnik.printing.conversions import m2pt, m2px +from mapnik.printing.formats import pagesizes +from mapnik.printing.scales import any_scale, default_scale, deg_min_sec_scale, sequence_scale + +try: + import cairo +except ImportError: + raise ImportError("Could not import pycairo; PDF rendering only available when pycairo is available") + +try: + import pangocairo + import pango + HAS_PANGOCAIRO_MODULE = True +except ImportError: + HAS_PANGOCAIRO_MODULE = False + +try: + from pypdf import PdfReader, PdfWriter + from pypdf.generic import (ArrayObject, DecodedStreamObject, DictionaryObject, FloatObject, NameObject, + NumberObject, TextStringObject) + HAS_PYPDF = True +except ImportError: + HAS_PYPDF = False + +""" +Style of centering to use with the map. + +CENTERING_NONE: map will be placed in the top left corner +CENTERING_CONSTRAINED_AXIS: map will be centered on the most constrained axis (e.g. vertical for a portrait page); a square + map will be constrained horizontally +CENTERING_UNCONSTRAINED_AXIS: map will be centered on the unconstrained axis +CENTERING_VERTICAL: map will be centered vertically +CENTERING_HORIZONTAL: map will be centered horizontally +CENTERING_BOTH: map will be centered vertically and horizontally +""" +CENTERING_NONE = 0 +CENTERING_CONSTRAINED_AXIS = 1 +CENTERING_UNCONSTRAINED_AXIS = 2 +CENTERING_VERTICAL = 3 +CENTERING_HORIZONTAL = 4 +CENTERING_BOTH = 5 + +# some predefined resolutions in DPI +DPI_72 = 72 +DPI_150 = 150 +DPI_300 = 300 +DPI_600 = 600 + +L = logging.getLogger("mapnik.printing") + + +class PDFPrinter(object): + + """ + Main class for creating PDF print outs. Basic usage is along the lines of + + import mapnik + import mapnik.printing + + page = mapnik.printing.PDFPrinter() + m = mapnik.Map(100,100) + mapnik.load_map(m, "my_xml_map_description", True) + m.zoom_all() + page.render_map(m, "my_output_file.pdf") + """ + + def __init__(self, + pagesize=pagesizes["a4"], + margin=0.005, + box=None, + percent_box=None, + scale_function=default_scale, + resolution=DPI_72, + preserve_aspect=True, + centering=CENTERING_CONSTRAINED_AXIS, + is_latlon=False, + use_ocg_layers=False, + font_name="DejaVu Sans"): + """ + Args: + pagesize: tuple of page size in meters, see predefined sizes in mapnik.formats module + margin: page margin in meters + box: the box to render the map into. Must be within page area, margin excluded. + This should be a Mapnik Box2d object. Default is the full page without margin. + percent_box: similar to box argument but specified as a percent (0->1) of the full page size. + If both box and percent_box are specified percent_box will be used. + scale: scale helper to use when rounding the map scale. This should be a function that takes a single + float and returns a float which is at least as large as the value passed in. This is a 1:x scale. + resolution: the resolution used to render non vector elements (in DPI). 
+
+        preserve_aspect: whether to preserve the map aspect ratio or not. This defaults to True and should
+            normally be left alone: scales and similar annotations will not work if it is set to False.
+        centering: centering rules for maps where the scale rounding has reduced the map size. This should
+            be one of the CENTERING_* constants defined in this module. The default is to center on the map's
+            constrained axis. Typically this will be horizontal for portrait pages and vertical for landscape pages.
+        is_latlon: whether the map is in lat lon degrees or not.
+        use_ocg_layers: create OCG layers in the PDF (requires pypdf)
+        font_name: the font name used each time text is written (e.g., legend titles, representative fraction, etc.)
+        """
+        self._pagesize = pagesize
+        self._margin = margin
+        self._box = box
+        self._resolution = resolution
+        self._centering = centering
+        self._is_latlon = is_latlon
+        self._use_ocg_layers = use_ocg_layers
+
+        self._surface = None
+        self._layer_names = []
+        self._filename = None
+
+        self.map_box = None
+
+        self.rounded_mapscale = None
+        self._scale_function = scale_function
+        self._preserve_aspect = preserve_aspect
+        if not preserve_aspect:
+            self._scale_function = any_scale
+
+        if percent_box:
+            self._box = Box2d(percent_box[0] * pagesize[0], percent_box[1] * pagesize[1],
+                              percent_box[2] * pagesize[0], percent_box[3] * pagesize[1])
+
+        self.font_name = font_name
+
+    def render_map(self, m, filename):
+        """Renders the given map to filename."""
+        self._surface = cairo.PDFSurface(filename, m2pt(self._pagesize[0]), m2pt(self._pagesize[1]))
+        ctx = cairo.Context(self._surface)
+
+        # store the output filename so that we can post-process the PDF
+        self._filename = filename
+
+        (eff_width, eff_height) = self._get_render_area_size()
+        (mapw, maph) = self._get_map_render_area_size(m, eff_width, eff_height)
+
+        # set the map pixel size so that raster elements render at specified resolution
+        m.resize(*self._get_map_pixel_size(mapw, maph))
+
+        (tx, ty) = self._get_render_corner((mapw, maph), m)
+
+        self._render_map_background(m, ctx, tx, ty)
+        self._render_layers_maps(m, ctx, tx, ty)
+
+        self.map_box = Box2d(tx, ty, tx + mapw, ty + maph)
+
+    def _get_render_area_size(self):
+        """Returns the width and height in meters of the page's render area."""
+        render_area = self._get_render_area()
+        return (render_area.width(), render_area.height())
+
+    def _get_render_area(self):
+        """Returns the page's area available for rendering. All dimensions are in meters."""
+        render_area = Box2d(
+            self._margin,
+            self._margin,
+            self._pagesize[0] - self._margin,
+            self._pagesize[1] - self._margin)
+
+        # if the user specified a box to render to, we take the intersection
+        # of that box with the page area available
+        if self._box:
+            return render_area.intersect(self._box)
+
+        return render_area
+
+    def _get_map_render_area_size(self, m, eff_width, eff_height):
+        """
+        Returns the render area for the map, i.e., a width and height in meters.
+        Preserves the map aspect by default.
+ """ + scalefactor = self._get_map_scalefactor(m, eff_width, eff_height) + mapw = eff_width * scalefactor + maph = eff_height * scalefactor + + page_aspect = eff_width / eff_height + map_aspect = m.envelope().width() / m.envelope().height() + if self._preserve_aspect: + if map_aspect > page_aspect: + maph = mapw * (1 / map_aspect) + else: + mapw = maph * map_aspect + + return (mapw, maph) + + def _get_map_scalefactor(self, m ,eff_width, eff_height): + """Returns the map scale factor based on effective render area size in meters.""" + scalex = m.envelope().width() / eff_width + scaley = m.envelope().height() / eff_height + scale = max(scalex, scaley) + rounded_mapscale = self._scale_function(scale) + self.rounded_mapscale = rounded_mapscale + scalefactor = scale / rounded_mapscale + + return scalefactor + + def _get_map_pixel_size(self, width_page_m, height_page_m): + """ + For a given map size in page coordinates, returns a tuple of the map + 'pixel' size based on the defined resolution. + """ + return (int(m2px(width_page_m, self._resolution)), + int(m2px(height_page_m, self._resolution))) + + def _get_render_corner(self, render_size, m): + """Returns the top left corner of the box we should render our map into.""" + available_area = self._get_render_area() + + x = available_area[0] + y = available_area[1] + + if self._has_horizontal_centering(m): + x += (available_area.width() - render_size[0]) / 2 + + if self._has_vertical_centering(m): + y += (available_area.height() - render_size[1]) / 2 + return (x, y) + + def _has_horizontal_centering(self, m): + """Returns whether the map has an horizontal centering or not.""" + is_map_size_constrained = self._is_map_size_constrained(m) + + if (self._centering == CENTERING_BOTH or + self._centering == CENTERING_HORIZONTAL or + (self._centering == CENTERING_CONSTRAINED_AXIS and is_map_size_constrained) or + (self._centering == CENTERING_UNCONSTRAINED_AXIS and not is_map_size_constrained)): + return True + else: + return False + + def _has_vertical_centering(self, m): + """Returns whether the map has a vertical centering or not.""" + is_map_size_constrained = self._is_map_size_constrained(m) + + if (self._centering == CENTERING_BOTH or + self._centering == CENTERING_VERTICAL or + (self._centering == CENTERING_CONSTRAINED_AXIS and not is_map_size_constrained) or + (self._centering == CENTERING_UNCONSTRAINED_AXIS and is_map_size_constrained)): + return True + else: + return False + + def _is_map_size_constrained(self, m): + """Tests whether the map's size is constrained on the horizontal or vertical axes.""" + available_area = self._get_render_area_size() + map_aspect = m.envelope().width() / m.envelope().height() + page_aspect = available_area[0] / available_area[1] + + return map_aspect > page_aspect + + def _render_map_background(self, m, ctx, tx, ty): + """ + Renders the map background if there is one. If the user set use_ocg_layers to True, we put + the background in a separate layer. 
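To make the rounding in `_get_map_scalefactor` concrete, here is a standalone sketch of the same arithmetic for a single axis (the method itself takes the larger of the two per-axis scales); the envelope and page widths are invented numbers:

```python
from mapnik.printing.scales import default_scale

env_width_m = 27000.0   # map envelope width in projected metres (invented)
eff_width_m = 0.20      # usable page width in metres (roughly A4 minus margins)

scale = env_width_m / eff_width_m      # 135000.0 -> a raw scale of 1:135000
rounded = default_scale(scale)         # 150000.0 -> rounded up to 1:150000
scalefactor = scale / rounded          # 0.9 -> the rendered map shrinks to 90%
```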
+ """ + if m.background or m.background_image or m.background_color: + background_map = Map(m.width,m.height,m.srs) + if m.background: + background_map.background = m.background + if m.background_image: + background_map.background_image = m.background_image + if m.background_color: + background_map.background_color = m.background_color + + background_map.zoom_to_box(m.envelope()) + self._render_layer_map(background_map, ctx, tx, ty) + + if self._use_ocg_layers: + self._surface.show_page() + self._layer_names.append("Map Background") + + def _render_layers_maps(self, m, ctx, tx, ty): + """Renders a layer as an individual map within a parent Map object.""" + for layer in m.layers: + self._layer_names.append(layer.name) + + layer_map = self._create_layer_map(m, layer) + self._render_layer_map(layer_map, ctx, tx, ty) + + if self.map_spans_antimeridian(m): + old_env = m.envelope() + if m.envelope().minx < -180: + delta = 360 + else: + delta = -360 + m.zoom_to_box( + Box2d( + old_env.minx + delta, + old_env.miny, + old_env.maxx + delta, + old_env.maxy)) + self._render_layer_map(layer_map, ctx, tx, ty) + # restore the original env + m.zoom_to_box(old_env) + + if self._use_ocg_layers: + self._surface.show_page() + + def _create_layer_map(self, m, layer): + """ + Instantiates and returns a Map object for the layer. + The layer Map has the parent Map dimensions. + """ + layer_map = Map(m.width, m.height, m.srs) + layer_map.layers.append(layer) + + for s in layer.styles: + layer_map.append_style(s, m.find_style(s)) + + layer_map.zoom_to_box(m.envelope()) + + return layer_map + + def _render_layer_map(self, layer_map, ctx, tx, ty): + """Renders the layer map. Scales the cairo context to the specified resolution.""" + ctx.save() + ctx.translate(m2pt(tx), m2pt(ty)) + # cairo defaults to 72dpi + ctx.scale(72.0 / self._resolution, 72.0 / self._resolution) + + # we clip the context to the map rectangle in order to restrict the background to that area + ctx.rectangle(0, 0, layer_map.width , layer_map.height) + ctx.clip() + + render(layer_map, ctx) + ctx.restore() + + def map_spans_antimeridian(self, m): + """Returns whether the map spans the antimeridian or not.""" + if self._is_latlon and (m.envelope().minx < -180 or m.envelope().maxx > 180): + return True + else: + return False + + def render_grid_on_map(self, m, grid_layer_name="Coordinates Grid Overlay"): + """ + Adds a grid overlay on the map, i.e., horizontal and vertical axes plus boxes around the map. + + Axes are drawn as 0.5px gray lines. + Boxes alternate between black fill / white stroke and white fill / black stroke. Font is DejaVu Sans. 
+ """ + (div_size, page_div_size) = self._get_sensible_scalebar_size(m) + + # render horizontal axes + (first_value_x, first_value_x_percent) = self._get_scale_axes_first_values( + div_size, + m.envelope().minx, + m.envelope().width()) + self._render_grid_axes_and_boxes_on_map( + first_value_x, + first_value_x_percent, + page_div_size, + div_size, + True) + + # render vertical axes + (first_value_y, first_value_y_percent) = self._get_scale_axes_first_values( + div_size, + m.envelope().miny, + m.envelope().height()) + self._render_grid_axes_and_boxes_on_map( + first_value_y, + first_value_y_percent, + page_div_size, + div_size, + False) + + if self._use_ocg_layers: + self._surface.show_page() + self._layer_names.append(grid_layer_name) + + def _get_sensible_scalebar_size(self, m, num_divisions=8, width=-1): + """ + Returns a sensible scalebar size based on the map envelope, the number of divisions expected + in the scalebar, and optionally the width of the containing box. + """ + div_size = sequence_scale(m.envelope().width() / num_divisions, [1, 2, 5]) + + # ensures we can fit the bar within page area width if specified + page_div_size = self.map_box.width() * div_size / m.envelope().width() + while width > 0 and page_div_size > width: + div_size /= 2.0 + page_div_size /= 2.0 + + return (div_size, page_div_size) + + def _get_scale_axes_first_values(self, div_size, map_envelope_start, map_envelope_side_length): + """ + Returns the first value and the first value percent - how far is that value on the map side length - + for the scale axes. + """ + first_value = (math.floor(map_envelope_start / div_size) + 1) * div_size + first_value_percent = (first_value - map_envelope_start) / map_envelope_side_length + + return (first_value, first_value_percent) + + def _render_grid_axes_and_boxes_on_map(self, first, first_percent, page_div_size, div_size, is_x_axis): + """ + Renders the horizontal or vertical axes and corresponding boxes on the map depending on the is_x_axis + parameter. + + Axes are drawn as 0.5px gray lines. + Boxes alternate between black fill / white stroke and white fill / black stroke. Font is DejaVu Sans. 
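The `[1, 2, 5]` sequence passed to `sequence_scale` in `_get_sensible_scalebar_size` above is what snaps scalebar divisions to round values. A worked example with an invented envelope width:

```python
from mapnik.printing.scales import sequence_scale

envelope_width = 34500.0                 # projected map width in metres (invented)
raw_division = envelope_width / 8        # 4312.5 with the default 8 divisions
division = sequence_scale(raw_division, [1, 2, 5])
print(division)                          # 5000.0 -> each division represents 5 km
```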
+ """ + ctx = cairo.Context(self._surface) + + if is_x_axis: + (start, end, boundary_start, boundary_end) = self.map_box.minx, self.map_box.maxx, self.map_box.miny, self.map_box.maxy + else: + (start, end, boundary_start, boundary_end) = self.map_box.miny, self.map_box.maxy, self.map_box.minx, self.map_box.maxx + + ctx.translate(m2pt(self.map_box.center().x), m2pt(self.map_box.center().y)) + ctx.rotate(-math.pi / 2) + ctx.translate(-m2pt(self.map_box.center().y), -m2pt(self.map_box.center().x)) + + label_value = first - div_size + if self._is_latlon and label_value < -180: + label_value += 360 + + prev = start + text = None + black_rgb = (0.0, 0.0, 0.0) + fill_color = black_rgb + value = first_percent * (end - start) + start + + while value < end: + self._draw_line(ctx, m2pt(value), m2pt(boundary_start), m2pt(value), m2pt(boundary_end), line_width=0.5) + self._render_grid_boxes(ctx, boundary_start, boundary_end, prev, value, text=text, fill_color=fill_color) + + prev = value + value += page_div_size + fill_color = [1.0 - z for z in fill_color] + label_value += div_size + if self._is_latlon and label_value > 180: + label_value -= 360 + text = "%d" % label_value + else: + # ensure that the last box gets drawn + self._render_grid_boxes(ctx, boundary_start, boundary_end, prev, end, fill_color=fill_color) + + def _draw_line(self, ctx, start_x, start_y, end_x, end_y, line_width=1, stroke_color=(0.5, 0.5, 0.5)): + """ + Draws a line from (start_x, start_y) to (end_x, end_y) on the specified cairo context. + By default, the line drawn is 1px wide and gray. + """ + ctx.save() + + ctx.move_to(start_x, start_y) + ctx.line_to(end_x, end_y) + ctx.set_source_rgb(*stroke_color) + ctx.set_line_width(line_width) + ctx.stroke() + + ctx.restore() + + def _render_grid_boxes(self, ctx, boundary_start, boundary_end, prev, value, text=None, border_size=8, fill_color=(0.0, 0.0, 0.0)): + """Renders the scale boxes at each end of the grid overlay.""" + for bar in (m2pt(boundary_start) - border_size, m2pt(boundary_end)): + rectangle = Rectangle(m2pt(prev), bar, m2pt(value - prev), border_size) + self._render_box(ctx, rectangle, text, fill_color=fill_color) + + def _render_box(self, ctx, rectangle, text=None, stroke_color=(0.0, 0.0, 0.0), fill_color=(1.0, 1.0, 1.0)): + """ + Renders a box with top left corner positioned at (x,y). + Default design is white fill and black stroke. + """ + ctx.save() + + line_width = 1 + + ctx.set_line_width(line_width) + ctx.set_source_rgb(*fill_color) + ctx.rectangle(rectangle.x, rectangle.y, rectangle.width, rectangle.height) + ctx.fill() + + ctx.set_source_rgb(*stroke_color) + ctx.rectangle(rectangle.x, rectangle.y, rectangle.width, rectangle.height) + ctx.stroke() + + if text: + ctx.move_to(rectangle.x + 1, rectangle.y) + self.write_text(ctx, text, size=rectangle.height - 2, stroke_color=[1 - z for z in fill_color]) + + ctx.restore() + + def write_text(self, ctx, text, box_width=None, size=10, stroke_color=(0.0, 0.0, 0.0), alignment=None): + """ + Writes the text to the specified context. 
+ + Returns: + A rectangle (x, y, width, height) representing the extents of the text drawn + """ + if HAS_PANGOCAIRO_MODULE: + return self._write_text_pangocairo(ctx, text, box_width=box_width, size=size, stroke_color=stroke_color, alignment=alignment) + else: + return self._write_text_cairo(ctx, text, size=size, stroke_color=stroke_color) + + def _write_text_pangocairo(self, ctx, text, box_width=None, size=10, stroke_color=(0.0, 0.0, 0.0), alignment=None): + """ + Use a pango.Layout object to write text to the cairo Context specified as a parameter. + + Returns: + A rectangle (x, y, width, height) representing the extents of the pango layout as drawn + """ + (attr, t, accel) = pango.parse_markup(text) + pctx = pangocairo.CairoContext(ctx) + + pango_layout = pctx.create_layout() + pango_layout.set_attributes(attr) + + fd = pango.FontDescription("%s %d" % (self.font_name, size)) + pango_layout.set_font_description(fd) + + if box_width: + pango_layout.set_width(int(box_width * pango.SCALE)) + if alignment: + pango_layout.set_alignment(alignment) + pctx.update_layout(pango_layout) + + pango_layout.set_text(t) + pctx.set_source_rgb(*stroke_color) + pctx.show_layout(pango_layout) + + return pango_layout.get_pixel_extents()[0] + + def _write_text_cairo(self, ctx, text, size=10, stroke_color=(0.0, 0.0, 0.0)): + """ + Writes text to the cairo Context specified as a parameter. + + Returns: + A rectangle (x, y, width, height) representing the extents of the text drawn + """ + ctx.rel_move_to(0, size) + ctx.select_font_face( + self.font_name, + cairo.FONT_SLANT_NORMAL, + cairo.FONT_WEIGHT_NORMAL) + ctx.set_font_size(size) + ctx.set_source_rgb(*stroke_color) + ctx.show_text(text) + + ctx.rel_move_to(0, size) + + return (0, 0, len(text) * size, size) + + def render_scale(self, m, ctx=None, width=0.05, num_divisions=3, bar_size=8.0, with_representative_fraction=True): + """ + Renders two things: + - a scale bar + - a scale representative fraction just below it + + Args: + m: the Map object to render the scale for + ctx: A cairo context to render the scale into. If this is None, we create a context and find out + the best location for the scale bar + width: the width of area available for rendering the scale bar (in meters) + num_divisions: the number of divisions for the scale bar + bar_size: the size of the scale bar in points + with_representative_fraction: whether we should render the representative fraction or not + + Returns: + The size of the rendered scale block in points. (0, 0) if nothing is rendered. + + Notes: + Does not render if lat lon maps or if the aspect ratio is not preserved. + The scale bar divisions alternate between black fill / white stroke and white fill / black stroke. 
+ """ + (w, h) = (0, 0) + + # don't render scale text if we are in lat lon + # dont render scale text if we have warped the aspect ratio + if self._preserve_aspect and not self._is_latlon: + + if ctx is None: + ctx = cairo.Context(self._surface) + (tx, ty) = self._get_meta_info_corner((self.map_box.width(), self.map_box.height()), m) + ctx.translate(m2pt(tx), m2pt(ty)) + + (w, h) = self._render_scale_bar(m, ctx, width, w, h, num_divisions, bar_size) + + # renders the scale representative fraction text + if with_representative_fraction: + bar_to_fraction_space = 2 + ctx.move_to(0, h + bar_to_fraction_space) + + box_width = None + if width > 0: + box_width = m2pt(width) + h += self._render_scale_representative_fraction(ctx, box_width) + + return (w, h) + + def _render_scale_bar(self, m, ctx, width=0.05, w=0, h=0, num_divisions=3, bar_size=8.0): + """ + Renders a graphic scale bar. + + Returns: + The width and height of the scale bar rendered + """ + # FIXME: bug. the scale bar divisions does not scale properly when the map envelope is huge + # to reproduce render python-mapnik/test/data/good_maps/agg_poly_gamma_map.xml and call render_scale + + scale_bar_extra_space_factor = 1.2 + div_width = width / num_divisions * scale_bar_extra_space_factor + (div_size, page_div_size) = self._get_sensible_scalebar_size(m, num_divisions=num_divisions, width=div_width) + + div_unit = self.get_div_unit(div_size) + + text = "0{}".format(div_unit) + + ctx.save() + if width > 0: + ctx.translate(m2pt(width - num_divisions * page_div_size) / 2, 0) + for ii in range(num_divisions): + fill = (ii % 2,) * 3 + rectangle = Rectangle(m2pt(ii*page_div_size), h, m2pt(page_div_size), bar_size) + self._render_box(ctx, rectangle, text, fill_color=fill) + fill = [1 - z for z in fill] + text = "{0}{1}".format((ii + 1) * div_size, div_unit) + + w = (num_divisions) * page_div_size + h += bar_size + ctx.restore() + + return (w, h) + + def get_div_unit(self, div_size, div_unit_short="m", div_unit_long="km", div_unit_divisor=1000.0): + """ + Returns the appropriate division unit based on the division size. + + Args: + div_size: the size of the division + div_unit_short: the default string for the division unit + div_unit_long: the string for the division unit if div_size is large enough to be converted + from div_unit_short to div_unit_long while keeping div_size greater than 1 + div_unit_divisor: the divisor applied to convert from div_unit_short to div_unit_long + + Note: + Default values use the metric system + """ + div_unit = div_unit_short + if div_size > div_unit_divisor: + div_size /= div_unit_divisor + div_unit = div_unit_long + + return div_unit + + def _render_scale_representative_fraction(self, ctx, box_width, box_width_padding=2, font_size=6): + """ + Renders the scale text, i.e. + + Returns: + The text extent width including padding. + """ + if HAS_PANGOCAIRO_MODULE: + alignment = pango.ALIGN_CENTER + else: + alignment = None + + text = "Scale 1:{}".format(int(self.rounded_mapscale)) + text_extent = self.write_text(ctx, text, box_width=box_width, size=font_size, alignment=alignment) + + text_extent_width = text_extent[3] + + return text_extent_width + box_width_padding + + def _get_meta_info_corner(self, render_size, m): + """ + Returns the corner (in page coordinates) of a possibly + sensible place to render metadata such as a legend or scale. 
+ """ + (x, y) = self._get_render_corner(render_size, m) + + render_box_padding_in_meters = 0.005 + if self._is_map_size_constrained(m): + y += render_size[1] + render_box_padding_in_meters + x = self._margin + else: + x += render_size[0] + render_box_padding_in_meters + y = self._margin + + return (x, y) + + def render_graticule_on_map(self, m, dec_degrees=True, grid_layer_name="Graticule"): + # FIXME: buggy. does not get the top and right lines and other issues. see _render_graticule_axes_and_text also + + """ + Renders the graticule on the map. + + Lines are drawn as 0.5px wide and gray. + Text font is DejaVu Sans and gray. + """ + + # don't render lat_lon grid if we are already in latlon + if self._is_latlon: + return + + p2 = Projection(m.srs) + latlon_bounds = p2.inverse(m.envelope()) + + # ensure that the projected map envelope is within the lat lon bounds and shift if necessary + latlon_bounds = self._adjust_latlon_bounds(m, p2, latlon_bounds) + + latlon_mapwidth = latlon_bounds.width() + # render an extra 20% so we generally won't miss the ends of lines + latlon_buffer = 0.2 * latlon_mapwidth + if dec_degrees: + # FIXME: what is the 7.0 magic number about? + latlon_divsize = default_scale(latlon_mapwidth / 7.0) + else: + # FIXME: what is the 7.0 magic number about? + latlon_divsize = deg_min_sec_scale(latlon_mapwidth / 7.0) + latlon_interpsize = latlon_mapwidth / m.width + + # renders the horizontal graticule axes + self._render_graticule_axes_and_text( + m, + p2, + latlon_bounds, + latlon_buffer, + latlon_interpsize, + latlon_divsize, + dec_degrees, + True) + + # renders the vertical graticule axes + self._render_graticule_axes_and_text( + m, + p2, + latlon_bounds, + latlon_buffer, + latlon_interpsize, + latlon_divsize, + dec_degrees, + False) + + if self._use_ocg_layers: + self._surface.show_page() + self._layer_names.append(grid_layer_name) + + def _adjust_latlon_bounds(self, m, proj, latlon_bounds): + """ + Ensures that the projected map envelope is within the lat lon bounds. + If it's not, it shifts the lat lon bounds in the right direction by 360 degrees. + + Returns: + The adjusted lat lon bounds box + """ + if proj.inverse(m.envelope().center()).x > latlon_bounds.maxx: + latlon_bounds = Box2d( + latlon_bounds.maxx, + latlon_bounds.miny, + latlon_bounds.minx + 360, + latlon_bounds.maxy) + if proj.inverse(m.envelope().center()).y > latlon_bounds.maxy: + latlon_bounds = Box2d( + latlon_bounds.miny, + latlon_bounds.maxy, + latlon_bounds.maxx, + latlon_bounds.miny + 360) + + return latlon_bounds + + def _render_graticule_axes_and_text(self, m, p2, latlon_bounds, latlon_buffer, + latlon_interpsize, latlon_divsize, dec_degrees, is_x_axis, stroke_color=(0.5, 0.5, 0.5)): + # FIXME: buggy. does not get the top and right lines and other issues. see render_graticule_on_map also + """ + Renders the horizontal or vertical axes on the map - depending on the is_x_axis parameter - along with + the latitude or longitude text. + + Lines are drawn as 0.5px gray. + Text font is DejaVu Sans gray. 
+
+        """
+
+        ctx = cairo.Context(self._surface)
+        ctx.set_source_rgb(*stroke_color)
+        ctx.set_line_width(1)
+        latlon_labelsize = 6
+
+        ctx.translate(m2pt(self.map_box.minx), m2pt(self.map_box.miny))
+        ctx.rectangle(0, 0, m2pt(self.map_box.width()), m2pt(self.map_box.height()))
+        ctx.clip()
+
+        ctx.select_font_face(self.font_name, cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
+        ctx.set_font_size(latlon_labelsize)
+
+        if is_x_axis:
+            (x1, x2, y1, y2) = latlon_bounds.minx, latlon_bounds.maxx, latlon_bounds.miny, latlon_bounds.maxy
+            box_top = self.map_box.height()
+        else:
+            (x1, x2, y1, y2) = latlon_bounds.miny, latlon_bounds.maxy, latlon_bounds.minx, latlon_bounds.maxx
+            ctx.translate(m2pt(self.map_box.width() / 2), m2pt(self.map_box.height() / 2))
+            ctx.rotate(-math.pi / 2)
+            ctx.translate(-m2pt(self.map_box.height() / 2), -m2pt(self.map_box.width() / 2))
+            box_top = self.map_box.width()
+
+        for xvalue in self.round_grid_generator(x1 - latlon_buffer, x2 + latlon_buffer, latlon_divsize):
+            yvalue = y1 - latlon_buffer
+            start_cross = None
+            end_cross = None
+            while yvalue < y2 + latlon_buffer:
+                if is_x_axis:
+                    start = m.view_transform().forward(p2.forward(Coord(xvalue, yvalue)))
+                else:
+                    temp = m.view_transform().forward(p2.forward(Coord(yvalue, xvalue)))
+                    start = Coord(m2pt(self.map_box.height()) - temp.y, temp.x)
+                yvalue += latlon_interpsize
+                if is_x_axis:
+                    end = m.view_transform().forward(p2.forward(Coord(xvalue, yvalue)))
+                else:
+                    temp = m.view_transform().forward(p2.forward(Coord(yvalue, xvalue)))
+                    end = Coord(m2pt(self.map_box.height()) - temp.y, temp.x)
+
+                self._draw_line(ctx, start.x, start.y, end.x, end.y, line_width=0.5)
+
+                # cmp() does not exist in Python 3; check whether the segment crosses
+                # the top (y=0) or bottom (y=map height) edge of the map box instead.
+                if (start.y > 0) != (end.y > 0):
+                    start_cross = end.x
+                if (start.y > m2pt(self.map_box.height())) != (end.y > m2pt(self.map_box.height())):
+                    end_cross = end.x
+
+            if dec_degrees:
+                line_text = "%g" % (xvalue)
+            else:
+                line_text = self.format_deg_min_sec(xvalue)
+
+            if start_cross:
+                ctx.move_to(start_cross + 2, latlon_labelsize)
+                ctx.show_text(line_text)
+            if end_cross:
+                ctx.move_to(end_cross + 2, m2pt(box_top) - 2)
+                ctx.show_text(line_text)
+
+    def round_grid_generator(self, first, last, step):
+        """Generator for lat lon grid values."""
+        val = (math.floor(first / step) + 1) * step
+        yield val
+        while val < last:
+            val += step
+            yield val
+
+    def format_deg_min_sec(self, value):
+        """Converts decimal degrees value to a degrees/minutes/seconds string."""
+        deg = math.floor(value)
+        min = math.floor((value - deg) / (1.0 / 60))
+        sec = int((value - deg - min * (1.0 / 60)) / (1.0 / 3600))
+        return "%d°%d'%d\"" % (deg, min, sec)
+
+    def render_legend(self, m, ctx=None, columns=2, width=None, height=None, attribution=None, legend_item_box_size=(0.015, 0.0075)):
+        """
+        Renders a legend for the Map object. A legend is a collection of legend items, i.e., a minified
+        representation of the layer's map along with the layer's title.
+
+        Args:
+            m: a Map object to render the legend for
+            ctx: a cairo context to render the legend to. If this is None then automatically create a context
+                and choose the best location for the legend.
+            width: width of area available to render legend in (in meters)
+            columns: number of columns available in legend box
+            attribution: additional text that will be rendered in gray under the layer name. keyed by layer name
+            legend_item_box_size: two tuple with width and height of legend item box size in meters
+
+        Returns:
+            The size of the rendered block in points.
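A small sketch of the graticule helpers defined above; the start, stop, and step values are invented, and constructing the printer assumes pycairo is installed:

```python
from mapnik.printing import PDFPrinter
from mapnik.printing.scales import deg_min_sec_scale

page = PDFPrinter()

# Grid values snap to multiples of the step strictly above the start value.
print(list(page.round_grid_generator(7.3, 8.1, 0.25)))
# [7.5, 7.75, 8.0, 8.25]

# Step selection for non-decimal graticules picks the next "nice" deg/min/sec unit.
print(deg_min_sec_scale(0.02))   # 0.0333... i.e. a 2 arc-minute step
```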
+ """ + render_box = Rectangle() + if self._surface: + if ctx is None: + ctx = cairo.Context(self._surface) + (tx, ty) = self._get_meta_info_corner((self.map_box.width(), self.map_box.height()), m) + ctx.translate(m2pt(tx), m2pt(ty)) + width = self._pagesize[0] - 2 * tx + height = self._pagesize[1] - self._margin - ty + + column_width = None + if width: + column_width = width / columns + render_box.width = m2pt(width) + + (render_box.width, render_box.height) = self._render_legend_items(m, ctx, render_box, column_width, height, + columns=columns, attribution=attribution, legend_item_box_size=legend_item_box_size) + + return (render_box.width, render_box.height) + + def _render_legend_items(self, m, ctx, render_box, column_width, height, columns=2, attribution=None, legend_item_box_size=(0.015, 0.0075)): + """Renders the legend items for the map.""" + current_column = 0 + processed_layers = [] + for layer in reversed(m.layers): + have_layer_header = False + layer_title = layer.name + if layer_title in processed_layers: + continue + processed_layers.append(layer_title) + + added_styles = self._get_unique_added_styles(m, layer) + legend_items = sorted(added_styles.keys()) + for li in legend_items: + (f, rule_text) = added_styles[li] + + legend_map_size = (int(m2pt(legend_item_box_size[0])), int(m2pt(legend_item_box_size[1]))) + lemap = self._create_legend_item_map(m, layer, f, legend_map_size) + + item_size = legend_map_size[1] + if not have_layer_header: + item_size += 8 + + # if we get to the bottom of the page, start a new column + # if we get to the max number of columns, start a new page + if render_box.y + item_size > m2pt(height): + current_column += 1 + render_box.y = 0 + if current_column >= columns: + self._surface.show_page() + render_box.x = 0 + current_column = 0 + + self._render_legend_item_map( + lemap, legend_map_size, ctx, render_box.x, render_box.y, current_column, column_width) + + ctx.move_to( + render_box.x + legend_map_size[0] + m2pt(current_column * column_width) + 2, render_box.y) + + legend_entry_size = self._render_legend_item_text( + ctx, legend_map_size, legend_item_box_size, column_width, layer_title, attribution) + + vertical_spacing = 5 + render_box.y += legend_entry_size + vertical_spacing + if render_box.y > render_box.height: + render_box.height = render_box.y + + return (render_box.width, render_box.height) + + def _get_unique_added_styles(self, m, layer): + """ + Go through the features to find which combinations of styles are active. + For each unique combination add a legend entry. 
+ """ + added_styles = {} + for f in layer.datasource.all_features(): + if f.geometry: + active_rules = [] + rule_text = "" + for s in layer.styles: + st = m.find_style(s) + for r in st.rules: + if self._is_rule_within_map_scale_limits(m, f, r): + active_rules.append((s, r.name)) + rule_text = self._get_rule_text(r, rule_text) + + active_rules = tuple(active_rules) + if active_rules in added_styles: + continue + + added_styles[active_rules] = (f, rule_text) + break + else: + added_styles[layer] = (None, None) + + return added_styles + + def _is_rule_within_map_scale_limits(self, m, feature, rule): + """Returns whether the rule is within the map scale limits or not.""" + if ((not rule.filter) or rule.filter.evaluate(feature) == '1') and \ + rule.min_scale <= m.scale_denominator() and m.scale_denominator() < rule.max_scale: + return True + else: + return False + + def _create_legend_item_map(self, m, layer, f, legend_map_size): + """Creates the legend map, i.e., a minified version of the layer map, and returns it.""" + from mapnik import MemoryDatasource + + legend_map = Map(legend_map_size[0], legend_map_size[1], srs=m.srs) + + # the buffer is needed to ensure that text labels that overflow the edge of the + # map still render for the legend + legend_map.buffer_size = 1000 + for layer_style in layer.styles: + lestyle = self._get_layer_style_valid_rules(m, layer_style) + legend_map.append_style(layer_style, lestyle) + + ds = MemoryDatasource() + if f is None: + ds = layer.datasource + layer_srs = layer.srs + elif f.envelope().width() == 0: + f.geometry = Geometry.from_wkt('POINT (0 0)') + ds.add_feature(f) + legend_map.zoom_to_box(Box2d(-1, -1, 1, 1)) + layer_srs = m.srs + else: + ds.add_feature(f) + layer_srs = layer.srs + + lelayer = Layer("LegendLayer", layer_srs) + lelayer.datasource = ds + for layer_style in layer.styles: + lelayer.styles.append(layer_style) + legend_map.layers.append(lelayer) + + if f is None or f.envelope().width() != 0: + legend_map.zoom_all() + legend_map.zoom(1.1) + + return legend_map + + def _get_layer_style_valid_rules(self, m, layer_style): + """Filters out the layer style rules that are not valid for the Map and returns the style.""" + style = m.find_style(layer_style) + legend_style = Style() + for r in style.rules: + for sym in r.symbols: + try: + sym.avoid_edges = False + except AttributeError: + L.warning("Could not set avoid_edges for rule %s", r.name) + if r.min_scale <= m.scale_denominator() and m.scale_denominator() < r.max_scale: + legend_rule = r + legend_rule.min_scale = 0 + legend_rule.max_scale = float("inf") + legend_style.rules.append(legend_rule) + + return legend_style + + def _render_legend_item_map(self, lemap, legend_map_size, ctx, x, y, current_column, column_width, stroke_color=(0.5, 0.5, 0.5), line_width=1): + """Renders the legend item map.""" + ctx.save() + ctx.translate(x + m2pt(current_column * column_width), y) + + # extra save around map render as it sets up a clip box and doesn't clear it + ctx.save() + render(lemap, ctx) + ctx.restore() + + ctx.rectangle(0, 0, *legend_map_size) + ctx.set_source_rgb(*stroke_color) + ctx.set_line_width(line_width) + ctx.stroke() + ctx.restore() + + def _render_legend_item_text(self, ctx, legend_map_size, legend_item_box_size, column_width, layer_title, attribution=None): + """ + Renders the legend item text next to the legend item box. + + Returns: + The size of the legend entry size, i.e., the legend box height or + the legend text height depending on which one takes more vertical + space. 
+ """ + gray_rgb = (0.5, 0.5, 0.5) + legend_box_padding_in_meters = 0.005 + legend_box_width = m2pt(column_width - legend_item_box_size[0] - legend_box_padding_in_meters) + + legend_entry_size = legend_map_size[1] + legend_text_size = 0 + + rule_text = layer_title + if rule_text: + e = self.write_text(ctx, rule_text, box_width=legend_box_width, size=6) + legend_text_size += e[3] + ctx.rel_move_to(0, e[3]) + if attribution: + if layer_title in attribution: + e = self.write_text( + ctx, + attribution[layer_title], + box_width=legend_box_width, + size=6, + stroke_color=gray_rgb) + legend_text_size += e[3] + + if legend_text_size > legend_entry_size: + legend_entry_size = legend_text_size + + return legend_entry_size + + def _get_rule_text(self, rule, rule_text): + """Returns the rule text.""" + if rule.filter and str(rule.filter) != "true": + if len(rule_text) > 0: + rule_text += " AND " + if rule.name: + rule_text += rule.name + else: + rule_text += str(rule.filter) + + return rule_text + + def finish(self): + """ + Finishes the cairo surface and converts PDF pages to PDF layers if + _use_ocg_layers was set to True. + """ + if self._surface: + self._surface.finish() + self._surface = None + + if self._use_ocg_layers: + self.convert_pdf_pages_to_layers( + self._filename, + layer_names=self._layer_names + + ["Legend and Information"], + reverse_all_but_last=True) + + def convert_pdf_pages_to_layers(self, filename, layer_names=None, reverse_all_but_last=True): + """ + Takes a multi pages PDF as input and converts each page to a layer in a single page PDF. + + Note: + requires pypdf to be available + + Args: + layer_names should be a sequence of the user visible names of the layers, if not given + or if shorter than num pages generic names will be given to the unnamed layers + + if output_name is not provided a temporary file will be used for the conversion which + will then be copied back over the source file. + """ + if not HAS_PYPDF: + raise RuntimeError("pypdf not available; pypdf required to convert pdf pages to layers") + + with open(filename, "rb+") as f: + file_reader = PdfReader(f) + file_writer = PdfWriter() + + template_page_size = file_reader.pages[0].mediabox + output_pdf = file_writer.add_blank_page( + width=template_page_size.width, + height=template_page_size.height) + + content_key = NameObject('/Contents') + output_pdf[content_key] = ArrayObject() + + resource_key = NameObject('/Resources') + output_pdf[resource_key] = DictionaryObject() + + (properties, ocgs) = self._make_ocg_layers(file_reader, file_writer, output_pdf, layer_names) + + properties_key = NameObject('/Properties') + output_pdf[resource_key][properties_key] = file_writer._add_object(properties) + + ocproperties = DictionaryObject() + ocproperties[NameObject('/OCGs')] = ocgs + + default_view = self._get_pdf_default_view(ocgs, reverse_all_but_last) + ocproperties[NameObject('/D')] = file_writer._add_object(default_view) + + file_writer._root_object[NameObject('/OCProperties')] = file_writer._add_object(ocproperties) + + f.seek(0) + file_writer.write(f) + f.truncate() + + def _make_ocg_layers(self, file_reader, file_writer, output_pdf, layer_names=None): + """ + Makes the OCGs layers. 
+ + Returns: + properties: a dictionary mapping the OCG layer name and the OCG layer property list + ocgs: an array containing the OCG layers + """ + properties = DictionaryObject() + ocgs = ArrayObject() + + for (idx, page) in enumerate(file_reader.pages): + # first start an OCG for the layer + ocg_name = NameObject('/oc%d' % idx) + ocgs_start = DecodedStreamObject() + ocgs_start._data = "/OC %s BDC\n" % ocg_name + ocg_end = DecodedStreamObject() + ocg_end._data = "EMC\n" + + if isinstance(page['/Contents'], ArrayObject): + page[NameObject('/Contents')].insert(0, ocgs_start) + page[NameObject('/Contents')].append(ocg_end) + else: + page[NameObject( + '/Contents')] = ArrayObject((ocgs_start, page['/Contents'], ocg_end)) + + output_pdf.merge_page(page) + + ocg = DictionaryObject() + ocg[NameObject('/Type')] = NameObject('/OCG') + + if layer_names and len(layer_names) > idx: + ocg[NameObject('/Name')] = TextStringObject(layer_names[idx]) + else: + ocg[NameObject('/Name')] = TextStringObject('Layer %d' % (idx + 1)) + + indirect_ocg = file_writer._add_object(ocg) + properties[ocg_name] = indirect_ocg + ocgs.append(indirect_ocg) + + return (properties, ocgs) + + def _get_pdf_default_view(self, ocgs, reverse_all_but_last=True): + """ + Returns the D configuration dictionary of the PDF. + + The D configuration dictionary specifies the initial state of the optional content + groups when a PDF is first opened. + """ + default_view = DictionaryObject() + default_view[NameObject('/Name')] = TextStringObject('Default') + default_view[NameObject('/BaseState ')] = NameObject('/ON ') + default_view[NameObject('/ON')] = ocgs + default_view[NameObject('/OFF')] = ArrayObject() + + if reverse_all_but_last: + default_view[NameObject('/Order')] = ArrayObject(reversed(ocgs[:-1])) + default_view[NameObject('/Order')].append(ocgs[-1]) + else: + default_view[NameObject('/Order')] = ArrayObject(reversed(ocgs)) + + return default_view + + def add_geospatial_pdf_header(self, m, filename, epsg=None, wkt=None): + """ + Adds geospatial PDF information to the PDF file as per: + Adobe® Supplement to the ISO 32000 PDF specification + BaseVersion: 1.7 + ExtensionLevel: 3 + (June 2008) + + Notes: + The epsg code or the wkt text of the projection must be provided. + Must be called *after* the page has had .finish() called. + """ + if not HAS_PYPDF: + raise RuntimeError("pypdf not available; pypdf required to add geospatial header to PDF") + + if not any((epsg,wkt)): + raise RuntimeError("EPSG or WKT required to add geospatial header to PDF") + + with open(filename, "rb+") as f: + file_reader = PdfReader(f) + file_writer = PdfWriter() + + # preserve OCProperties at document root if we have one + if NameObject('/OCProperties') in file_reader.trailer['/Root']: + file_writer._root_object[NameObject('/OCProperties')] = file_reader.trailer[ + '/Root'].get_object()[NameObject('/OCProperties')] + + for page in file_reader.pages: + gcs = DictionaryObject() + gcs[NameObject('/Type')] = NameObject('/PROJCS') + + if epsg: + gcs[NameObject('/EPSG')] = NumberObject(int(epsg)) + if wkt: + gcs[NameObject('/WKT')] = TextStringObject(wkt) + + measure = self._get_pdf_measure(m, gcs) + page[NameObject('/VP')] = self._get_pdf_vp(measure) + + file_writer.add_page(page) + + f.seek(0) + file_writer.write(f) + f.truncate() + + def _get_pdf_measure(self, m, gcs): + """ + Returns the PDF Measure dictionary. + + The Measure dictionary is used in the viewport array + and specifies the scale and units that apply to the output map. 
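Pulling the pieces above together, an end-to-end sketch: the style sheet path, the layer name used for attribution, and the EPSG code are placeholders, and both the OCG layer conversion and the geospatial header require pypdf.

```python
import mapnik
from mapnik.printing import PDFPrinter, DPI_150, pagesizes

m = mapnik.Map(800, 600)
mapnik.load_map(m, 'style.xml')      # placeholder style sheet
m.zoom_all()

page = PDFPrinter(pagesize=pagesizes['a4l'],
                  resolution=DPI_150,
                  use_ocg_layers=True)
page.render_map(m, 'map.pdf')
page.render_scale(m)
page.render_legend(m, attribution={'places': '© OpenStreetMap contributors'})
page.finish()                        # converts pages to OCG layers when requested
page.add_geospatial_pdf_header(m, 'map.pdf', epsg=3857)
```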
+ """ + measure = DictionaryObject() + measure[NameObject('/Type')] = NameObject('/Measure') + measure[NameObject('/Subtype')] = NameObject('/GEO') + measure[NameObject('/GCS')] = gcs + + bounds = self._get_pdf_bounds() + measure[NameObject('/Bounds')] = bounds + measure[NameObject('/LPTS')] = bounds + + measure[NameObject('/GPTS')] = self._get_pdf_gpts(m) + + return measure + + def _get_pdf_bounds(self): + """ + Returns the PDF BOUNDS array. + + The PDF's bounds array is equivalent to the map's neatline, i.e., + the border delineating the extent of geographic data on the output map. + """ + bounds = ArrayObject() + + # PDF specification's default for bounds (full unit square) + bounds_default = (0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0) + + for x in bounds_default: + bounds.append(FloatObject(str(x))) + + return bounds + + def _get_pdf_gpts(self, m): + """ + Returns the GPTS array object containing the four corners of the + map envelope in map projection. + + The GPTS entry is an array of numbers, taken pairwise, defining + points as latitude and longitude. + """ + gpts = ArrayObject() + + tr = ProjTransform(Projection(m.srs), Projection("epsg:4326")) + env = m.envelope() + for p in ((env.minx, env.miny), (env.minx, env.maxy), + (env.maxx, env.maxy), (env.maxx, env.miny)): + latlon_corner = tr.forward(Coord(*p)) + # these are in lat,lon order according to the specification + gpts.append(FloatObject(str(latlon_corner.y))) + gpts.append(FloatObject(str(latlon_corner.x))) + + return gpts + + def _get_pdf_vp(self, measure): + """ + Returns the PDF's VP array. + + The VP entry is an array of viewport dictionaries. A viewport is basiscally + a rectangular region on the PDF page. The only required entry is the BBox which + specifies the location of the viewport on the page. + """ + viewport = DictionaryObject() + viewport[NameObject('/Type')] = NameObject('/Viewport') + + bbox = ArrayObject() + for x in self.map_box: + # this should be converted from meters to points + # fix submitted in https://github.com/mapnik/python-mapnik/pull/115 + bbox.append(FloatObject(str(x))) + + viewport[NameObject('/BBox')] = bbox + viewport[NameObject('/Measure')] = measure + + vp_array = ArrayObject() + vp_array.append(viewport) + + return vp_array + + def get_width(self): + """Returns page's width.""" + return self._pagesize[0] + + def get_height(self): + """Returns page's height.""" + return self._pagesize[1] + + def get_margin(self): + """Returns page's margin.""" + return self._margin + + def get_cairo_context(self): + """ + Allows access to the cairo Context so that extra 'bits' + can be rendered to the page directly. + """ + return cairo.Context(self._surface) + + +class Rectangle(object): + + def __init__(self, x=0, y=0, width=0, height=0): + self.x = x + self.y = y + self.width = width + self.height = height + + def __repr__(self): + return "({}, {}, {}, {})".format(self.x, self.y, self.width, self.height) + + def origin(self): + """Returns the top left corner coordinates in pdf points.""" + return (self.x, self.y) diff --git a/packaging/mapnik/printing/conversions.py b/packaging/mapnik/printing/conversions.py new file mode 100644 index 000000000..c08c5e808 --- /dev/null +++ b/packaging/mapnik/printing/conversions.py @@ -0,0 +1,17 @@ +"""Unit conversion helpers.""" + +def m2pt(x, pt_size=0.0254/72.0): + """Converts distance from meters to points. Default value is PDF point size.""" + return x / pt_size + +def pt2m(x, pt_size=0.0254/72.0): + """Converts distance from points to meters. 
Default value is PDF point size.""" + return x * pt_size + +def m2in(x): + """Converts distance from meters to inches.""" + return x / 0.0254 + +def m2px(x, resolution): + """Converts distance from meters to pixels at the given resolution in DPI/PPI.""" + return m2in(x) * resolution diff --git a/packaging/mapnik/printing/formats.py b/packaging/mapnik/printing/formats.py new file mode 100644 index 000000000..e2b3a3c4f --- /dev/null +++ b/packaging/mapnik/printing/formats.py @@ -0,0 +1,74 @@ +"""Some predefined page sizes in meters.""" + +pagesizes = { + "a0": (0.841000, 1.189000), + "a0l": (1.189000, 0.841000), + "b0": (1.000000, 1.414000), + "b0l": (1.414000, 1.000000), + "c0": (0.917000, 1.297000), + "c0l": (1.297000, 0.917000), + "a1": (0.594000, 0.841000), + "a1l": (0.841000, 0.594000), + "b1": (0.707000, 1.000000), + "b1l": (1.000000, 0.707000), + "c1": (0.648000, 0.917000), + "c1l": (0.917000, 0.648000), + "a2": (0.420000, 0.594000), + "a2l": (0.594000, 0.420000), + "b2": (0.500000, 0.707000), + "b2l": (0.707000, 0.500000), + "c2": (0.458000, 0.648000), + "c2l": (0.648000, 0.458000), + "a3": (0.297000, 0.420000), + "a3l": (0.420000, 0.297000), + "b3": (0.353000, 0.500000), + "b3l": (0.500000, 0.353000), + "c3": (0.324000, 0.458000), + "c3l": (0.458000, 0.324000), + "a4": (0.210000, 0.297000), + "a4l": (0.297000, 0.210000), + "b4": (0.250000, 0.353000), + "b4l": (0.353000, 0.250000), + "c4": (0.229000, 0.324000), + "c4l": (0.324000, 0.229000), + "a5": (0.148000, 0.210000), + "a5l": (0.210000, 0.148000), + "b5": (0.176000, 0.250000), + "b5l": (0.250000, 0.176000), + "c5": (0.162000, 0.229000), + "c5l": (0.229000, 0.162000), + "a6": (0.105000, 0.148000), + "a6l": (0.148000, 0.105000), + "b6": (0.125000, 0.176000), + "b6l": (0.176000, 0.125000), + "c6": (0.114000, 0.162000), + "c6l": (0.162000, 0.114000), + "a7": (0.074000, 0.105000), + "a7l": (0.105000, 0.074000), + "b7": (0.088000, 0.125000), + "b7l": (0.125000, 0.088000), + "c7": (0.081000, 0.114000), + "c7l": (0.114000, 0.081000), + "a8": (0.052000, 0.074000), + "a8l": (0.074000, 0.052000), + "b8": (0.062000, 0.088000), + "b8l": (0.088000, 0.062000), + "c8": (0.057000, 0.081000), + "c8l": (0.081000, 0.057000), + "a9": (0.037000, 0.052000), + "a9l": (0.052000, 0.037000), + "b9": (0.044000, 0.062000), + "b9l": (0.062000, 0.044000), + "c9": (0.040000, 0.057000), + "c9l": (0.057000, 0.040000), + "a10": (0.026000, 0.037000), + "a10l": (0.037000, 0.026000), + "b10": (0.031000, 0.044000), + "b10l": (0.044000, 0.031000), + "c10": (0.028000, 0.040000), + "c10l": (0.040000, 0.028000), + "letter": (0.216, 0.279), + "letterl": (0.279, 0.216), + "legal": (0.216, 0.356), + "legall": (0.356, 0.216), +} diff --git a/packaging/mapnik/printing/scales.py b/packaging/mapnik/printing/scales.py new file mode 100644 index 000000000..2cb0db20b --- /dev/null +++ b/packaging/mapnik/printing/scales.py @@ -0,0 +1,46 @@ +"""Scale helpers functions.""" + +import math + + +def any_scale(scale): + """Scale helper function that allows any scale.""" + return scale + +def sequence_scale(scale, scale_sequence): + """Sequence scale helper, this rounds scale to a 'sensible' value.""" + factor = math.floor(math.log10(scale)) + norm = scale / (10**factor) + + for s in scale_sequence: + if norm <= s: + return s * 10**factor + + return scale_sequence[0] * 10**(factor + 1) + +def default_scale(scale): + """Default scale helper, this rounds scale to a 'sensible' value.""" + return sequence_scale(scale, (1, 1.25, 1.5, 1.75, 2, 2.5, 3, 4, 5, 6, 7.5, 8, 9, 10)) + +def 
deg_min_sec_scale(scale): + for x in (1.0 / 3600, + 2.0 / 3600, + 5.0 / 3600, + 10.0 / 3600, + 30.0 / 3600, + 1.0 / 60, + 2.0 / 60, + 5.0 / 60, + 10.0 / 60, + 30.0 / 60, + 1, + 2, + 5, + 10, + 30, + 60 + ): + if scale < x: + return x + else: + return x diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..bfa7d31d1 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,39 @@ +[build-system] +requires = [ + "setuptools >= 80.9.0", + "pybind11 >= 3.0.1", +] +build-backend = "setuptools.build_meta" + +[project] +name = "mapnik" +version = "4.2.1.beta" +description = "Python bindings for Mapnik" +license = "LGPL-2.1-or-later" + +keywords = ["mapnik", "beautiful maps", "cartography", "python-mapnik"] + +classifiers = [ + "Development Status :: 4 - Beta", +] +authors = [ +{name= "Artem Pavlenko", email = "artem@mapnik.org"}, +] +maintainers = [ +{name= "Artem Pavlenko", email = "artem@mapnik.org"}, +] + +requires-python = ">= 3.9" + +[project.urls] +Homepage = "https://mapnik.org" +Documentation = "https://github.com/mapnik/python-mapnik/wiki" +Repository = "https://github.com/mapnik/python-mapnik" +"Bug Tracker" = "https://github.com/mapnik/python-mapnik/issues" +Changelog = "https://github.com/mapnik/python-mapnik/blob/master/CHANGELOG.md" + +[tool.pytest.ini_options] +minversion = "8.0" +testpaths = [ + "test/python_tests", +] \ No newline at end of file diff --git a/scripts/mason.sh b/scripts/mason.sh new file mode 100644 index 000000000..fbf13cf23 --- /dev/null +++ b/scripts/mason.sh @@ -0,0 +1,189 @@ +#!/usr/bin/env bash + +# Mason Client Version 1.0.0 + +# See below for `set -euo pipefail` + +# Print file + line number when not in CLI mode +if [[ "$0" != "$BASH_SOURCE" ]]; then +function mason_error { + local _LINE _FN _FILE + read _LINE _FN _FILE <<< "`caller 1`" + if [ -t 1 ]; then + >&2 echo -e "\033[1m\033[31m$@ in ${_FILE} on line ${_LINE}\033[0m" + else + >&2 echo "$@ in ${_FILE} on line ${_LINE}" + fi +} +else +function mason_error { + if [ -t 1 ]; then + >&2 echo -e "\033[1m\033[31m$@\033[0m" + else + >&2 echo "$@" + fi +} +fi + +function mason_info { + if [ -t 1 ]; then + >&2 echo -e "\033[1m\033[36m$@\033[0m" + else + >&2 echo "$@" + fi +} + +function mason_detect_platform { + # Determine platform + if [[ -z "${MASON_PLATFORM:-}" ]]; then + if [[ "`uname -s`" = 'Darwin' ]]; then + MASON_PLATFORM="osx" + else + MASON_PLATFORM="linux" + fi + fi + + # Determine platform version string + if [[ -z "${MASON_PLATFORM_VERSION:-}" ]]; then + MASON_PLATFORM_VERSION="`uname -m`" + fi +} + +function mason_trim { + local _TMP="${1#"${1%%[![:space:]]*}"}" + echo -n "${_TMP%"${_TMP##*[![:space:]]}"}" +} + +function mason_uppercase { + echo -n "$1" | tr "[a-z]" "[A-Z]" +} + +function mason_use { + local _HEADER_ONLY=false _PACKAGE _SAFE_PACKAGE _VERSION _PLATFORM_ID _SLUG _INSTALL_PATH _INSTALL_PATH_RELATIVE + + while [[ $# -gt 0 ]]; do + if [[ $1 == "--header-only" ]]; then + _HEADER_ONLY=true + elif [[ -z "${_PACKAGE:-}" ]]; then + _PACKAGE="$1" + elif [[ -z "${_VERSION:-}" ]]; then + _VERSION="$1" + else + mason_error "[Mason] mason_use() called with unrecognized arguments: '$@'" + exit 1 + fi + shift + done + + if [[ -z "${_PACKAGE:-}" ]]; then + mason_error "[Mason] No package name given" + exit 1 + fi + + # Create a package name that we can use as shell variable names. 
+ _SAFE_PACKAGE="${_PACKAGE//[![:alnum:]]/_}" + + if [[ -z "${_VERSION:-}" ]]; then + mason_error "[Mason] Specifying a version is required" + exit 1 + fi + + _PLATFORM_ID="${MASON_PLATFORM}-${MASON_PLATFORM_VERSION}" + if [[ "${_HEADER_ONLY}" = true ]] ; then + _PLATFORM_ID="headers" + fi + + _SLUG="${_PLATFORM_ID}/${_PACKAGE}/${_VERSION}" + _INSTALL_PATH="${MASON_PACKAGE_DIR}/${_SLUG}" + _INSTALL_PATH_RELATIVE="${_INSTALL_PATH#`pwd`/}" + + if [[ ! -d "${_INSTALL_PATH}" ]]; then + local _CACHE_PATH _URL _CACHE_DIR _ERROR + _CACHE_PATH="${MASON_PACKAGE_DIR}/.binaries/${_SLUG}.tar.gz" + if [ ! -f "${_CACHE_PATH}" ]; then + # Download the package + _URL="${MASON_REPOSITORY}/${_SLUG}.tar.gz" + mason_info "[Mason] Downloading package ${_URL}..." + _CACHE_DIR="`dirname "${_CACHE_PATH}"`" + mkdir -p "${_CACHE_DIR}" + if ! _ERROR=$(curl --retry 3 --silent --fail --show-error --location "${_URL}" --output "${_CACHE_PATH}.tmp" 2>&1); then + mason_error "[Mason] ${_ERROR}" + exit 1 + else + # We downloaded to a temporary file to prevent half-finished downloads + mv "${_CACHE_PATH}.tmp" "${_CACHE_PATH}" + fi + fi + + # Unpack the package + mason_info "[Mason] Unpacking package to ${_INSTALL_PATH_RELATIVE}..." + mkdir -p "${_INSTALL_PATH}" + tar xzf "${_CACHE_PATH}" -C "${_INSTALL_PATH}" + fi + + # Error out if there is no config file. + if [[ ! -f "${_INSTALL_PATH}/mason.ini" ]]; then + mason_error "[Mason] Could not find mason.ini for package ${_PACKAGE} ${_VERSION}" + exit 1 + fi + + # We use this instead of declare, since it declare makes local variables when run in a function. + read "MASON_PACKAGE_${_SAFE_PACKAGE}_PREFIX" <<< "${_INSTALL_PATH}" + + # Load the configuration from the ini file + local _LINE _KEY _VALUE + while read _LINE; do + _KEY="`mason_trim "${_LINE%%=*}"`" + if [[ "${_KEY}" =~ ^[a-z_]+$ ]]; then + _KEY="`mason_uppercase "${_KEY}"`" # Convert to uppercase + _LINE="${_LINE%%;*}" # Trim trailing comments + _VALUE="`mason_trim "${_LINE#*=}"`" + _VALUE="${_VALUE//\{prefix\}/${_INSTALL_PATH}}" # Replace {prefix} + read "MASON_PACKAGE_${_SAFE_PACKAGE}_${_KEY}" <<< "${_VALUE}" + fi + done < "${_INSTALL_PATH}/mason.ini" + + # We're using the fact that this variable is declared to pass back the package name we parsed + # from the argument string to avoid polluting the global namespace. + if [ ! -z ${_MASON_SAFE_PACKAGE_NAME+x} ]; then + _MASON_SAFE_PACKAGE_NAME="${_SAFE_PACKAGE}" + fi +} + +function mason_cli { + local _MASON_SAFE_PACKAGE_NAME= _PROP _VAR + if [[ $# -lt 1 ]]; then + mason_error "[Mason] Usage: $0 [--header-only] " + mason_error "[Mason] is one of 'include_dirs', 'definitions', 'options', 'ldflags', 'static_libs', or any custom variables in the package's mason.ini." + exit 1 + fi + + # Store first argument and pass the remaining arguments to mason_use + _PROP="`mason_uppercase "$1"`" + shift + mason_use "$@" + + # Optionally print variables + _VAR="MASON_PACKAGE_${_MASON_SAFE_PACKAGE_NAME}_${_PROP}" + if [[ ! -z "${!_VAR:-}" ]]; then + echo "${!_VAR}" + fi +} + +# Directory where Mason packages are located; typically ends with mason_packages +if [[ -z "${MASON_PACKAGE_DIR:-}" ]]; then + MASON_PACKAGE_DIR="`pwd`/mason_packages" +fi + +# URL prefix of where packages are located. +if [[ -z "${MASON_REPOSITORY:-}" ]]; then + MASON_REPOSITORY="https://mason-binaries.s3.amazonaws.com" +fi + +mason_detect_platform + +# Print variables if this shell script is invoked directly. 
+if [[ "$0" = "$BASH_SOURCE" ]]; then + set -euo pipefail + mason_cli "$@" +fi diff --git a/scripts/setup_mason.sh b/scripts/setup_mason.sh new file mode 100755 index 000000000..bd55f99c9 --- /dev/null +++ b/scripts/setup_mason.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -eu +set -o pipefail + +# we pin the mason version to avoid changes in mason breaking builds +MASON_VERSION="751b5c5d" + +function setup_mason() { + mkdir -p ./mason + curl -sSfL https://github.com/mapbox/mason/archive/${MASON_VERSION}.tar.gz | tar --gunzip --extract --strip-components=1 --exclude="*md" --exclude="test*" --directory=./mason + export MASON_HOME=$(pwd)/mason_packages/.link + export PATH=$(pwd)/mason:${PATH} + export CXX=${CXX:-clang++} + export CC=${CC:-clang} +} + + +setup_mason + +set +eu +set +o pipefail diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index f4ca59d33..000000000 --- a/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[nosetests] -verbosity=1 diff --git a/setup.py b/setup.py index 85757e6ca..a72565629 100755 --- a/setup.py +++ b/setup.py @@ -1,228 +1,153 @@ -#! /usr/bin/env python +#! /usr/bin/env python3 -from distutils import sysconfig -from setuptools import setup, Extension -import os -import subprocess +from pybind11.setup_helpers import Pybind11Extension, build_ext +from setuptools import setup, find_namespace_packages import sys -import shutil -import re - -cflags = sysconfig.get_config_var('CFLAGS') -sysconfig._config_vars['CFLAGS'] = re.sub(' +', ' ', cflags.replace('-g', '').replace('-Os', '').replace('-arch i386', '')) -opt = sysconfig.get_config_var('OPT') -sysconfig._config_vars['OPT'] = re.sub(' +', ' ', opt.replace('-g', '').replace('-Os', '')) -ldshared = sysconfig.get_config_var('LDSHARED') -sysconfig._config_vars['LDSHARED'] = re.sub(' +', ' ', ldshared.replace('-g', '').replace('-Os', '').replace('-arch i386', '')) -ldflags = sysconfig.get_config_var('LDFLAGS') -sysconfig._config_vars['LDFLAGS'] = re.sub(' +', ' ', ldflags.replace('-g', '').replace('-Os', '').replace('-arch i386', '')) -pycflags = sysconfig.get_config_var('PY_CFLAGS') -sysconfig._config_vars['PY_CFLAGS'] = re.sub(' +', ' ', pycflags.replace('-g', '').replace('-Os', '').replace('-arch i386', '')) -sysconfig._config_vars['CFLAGSFORSHARED'] = '' -os.environ['ARCHFLAGS'] = '' - -if os.environ.get("MASON_BUILD", "false") == "true": - # run bootstrap.sh to get mason builds - subprocess.call(['./bootstrap.sh']) - mapnik_config = 'mason_packages/.link/bin/mapnik-config' - mason_build = True -else: - mapnik_config = 'mapnik-config' - mason_build = False - -boost_python_lib = os.environ.get("BOOST_PYTHON_LIB", 'boost_python-mt') -boost_system_lib = os.environ.get("BOOST_SYSTEM_LIB", 'boost_system-mt') -boost_thread_lib = os.environ.get("BOOST_THREAD_LIB", 'boost_thread-mt') - -try: - linkflags = subprocess.check_output([mapnik_config, '--libs']).rstrip('\n').split(' ') - lib_path = linkflags[0][2:] - linkflags.extend(subprocess.check_output([mapnik_config, '--ldflags']).rstrip('\n').split(' ')) -except: - raise Exception("Failed to find proper linking flags from mapnik config"); - -## Dynamically make the mapnik/paths.py file if it doesn't exist. 
-if os.path.isfile('mapnik/paths.py'): - create_paths = False -else: - create_paths = True - f_paths = open('mapnik/paths.py', 'w') - f_paths.write('import os\n') - f_paths.write('\n') - -if mason_build: - try: - if sys.platform == 'darwin': - base_f = 'libmapnik.dylib' - else: - base_f = 'libmapnik.so.3.0' - f = os.path.join(lib_path, base_f) - shutil.copyfile(f, os.path.join('mapnik', base_f)) - except shutil.Error: - pass - input_plugin_path = subprocess.check_output([mapnik_config, '--input-plugins']).rstrip('\n') - input_plugin_files = os.listdir(input_plugin_path) - input_plugin_files = [os.path.join(input_plugin_path, f) for f in input_plugin_files] - if not os.path.exists(os.path.join('mapnik','plugins','input')): - os.makedirs(os.path.join('mapnik','plugins', 'input')) - for f in input_plugin_files: - try: - shutil.copyfile(f, os.path.join('mapnik', 'plugins', 'input', os.path.basename(f))) - except shutil.Error: - pass - font_path = subprocess.check_output([mapnik_config, '--fonts']).rstrip('\n') - font_files = os.listdir(font_path) - font_files = [os.path.join(font_path, f) for f in font_files] - if not os.path.exists(os.path.join('mapnik','plugins','fonts')): - os.makedirs(os.path.join('mapnik','plugins','fonts')) - for f in font_files: - try: - shutil.copyfile(f, os.path.join('mapnik','plugins','fonts', os.path.basename(f))) - except shutil.Error: - pass - if create_paths: - f_paths.write('mapniklibpath = os.path.join(os.path.dirname(os.path.realpath(__file__)), "plugins")\n') -elif create_paths: - f_paths.write("mapniklibpath = '"+lib_path+"/mapnik'\n") - f_paths.write('mapniklibpath = os.path.normpath(mapniklibpath)\n') - -if create_paths: - f_paths.write("inputpluginspath = os.path.join(mapniklibpath,'input')\n") - f_paths.write("fontscollectionpath = os.path.join(mapniklibpath,'fonts')\n") - f_paths.write("__all__ = [mapniklibpath,inputpluginspath,fontscollectionpath]\n") - f_paths.close() - - -if not mason_build: - icu_path = subprocess.check_output([mapnik_config, '--icu-data']).rstrip('\n') -else: - icu_path = 'mason_packages/.link/share/icu/' -if icu_path: - icu_files = os.listdir(icu_path) - icu_files = [os.path.join(icu_path, f) for f in icu_files] - if not os.path.exists(os.path.join('mapnik','plugins','icu')): - os.makedirs(os.path.join('mapnik','plugins','icu')) - for f in icu_files: - try: - shutil.copyfile(f, os.path.join('mapnik','plugins','icu', os.path.basename(f))) - except shutil.Error: - pass - -if not mason_build: - gdal_path = subprocess.check_output([mapnik_config, '--gdal-data']).rstrip('\n') -else: - gdal_path = 'mason_packages/.link/share/gdal/' - if os.path.exists('mason_packages/.link/share/gdal/gdal/'): - gdal_path = 'mason_packages/.link/share/gdal/gdal/' -if gdal_path: - gdal_files = os.listdir(gdal_path) - gdal_files = [os.path.join(gdal_path, f) for f in gdal_files] - if not os.path.exists(os.path.join('mapnik','plugins','gdal')): - os.makedirs(os.path.join('mapnik','plugins','gdal')) - for f in gdal_files: - try: - shutil.copyfile(f, os.path.join('mapnik','plugins','gdal', os.path.basename(f))) - except shutil.Error: - pass +import subprocess +import os -if not mason_build: - proj_path = subprocess.check_output([mapnik_config, '--proj-lib']).rstrip('\n') +mapnik_config = 'mapnik-config' + +def check_output(args): + output = subprocess.check_output(args).decode() + return output.rstrip('\n') + +linkflags = [] +bin_path = os.path.join(check_output([mapnik_config, '--prefix']),'bin') +lib_path = os.path.join(check_output([mapnik_config, 
'--prefix']),'lib') +linkflags.extend(check_output([mapnik_config, '--libs']).split(' ')) +linkflags.extend(check_output([mapnik_config, '--ldflags']).split(' ')) +linkflags.extend(check_output([mapnik_config, '--dep-libs']).split(' ')) +linkflags.extend([ + '-lmapnik-wkt', + '-lmapnik-json', +]) + +# Remove symlinks +if os.path.islink('packaging/mapnik/bin') : + os.unlink('packaging/mapnik/bin') +if os.path.islink('packaging/mapnik/lib') : + os.unlink('packaging/mapnik/lib') +# Dynamically make the mapnik/paths.py file +f_paths = open('packaging/mapnik/paths.py', 'w') +f_paths.write('import os\n') +f_paths.write('\n') + +if os.environ.get('SYSTEM_MAPNIK'): + input_plugin_path = check_output([mapnik_config, '--input-plugins']) + font_path = check_output([mapnik_config, '--fonts']) + f_paths.write("mapniklibpath = '{path}'\n".format(path=lib_path)) + f_paths.write("inputpluginspath = '{path}'\n".format(path=input_plugin_path)) + f_paths.write("fontscollectionpath = '{path}'\n".format(path=font_path)) else: - proj_path = 'mason_packages/.link/share/proj/' - if os.path.exists('mason_packages/.link/share/proj/proj/'): - proj_path = 'mason_packages/.link/share/proj/proj/' -if proj_path: - proj_files = os.listdir(proj_path) - proj_files = [os.path.join(proj_path, f) for f in proj_files] - if not os.path.exists(os.path.join('mapnik','plugins','proj')): - os.makedirs(os.path.join('mapnik','plugins','proj')) - for f in proj_files: - try: - shutil.copyfile(f, os.path.join('mapnik','plugins','proj', os.path.basename(f))) - except shutil.Error: - pass - -extra_comp_args = subprocess.check_output([mapnik_config, '--cflags']).rstrip('\n').split(' ') + if not os.path.exists('packaging/mapnik/bin'): + os.symlink(bin_path, 'packaging/mapnik/bin') + if not os.path.exists('packaging/mapnik/lib') : + os.symlink(lib_path, 'packaging/mapnik/lib') + else: + names = (name for name in os.listdir(lib_path) if os.path.isfile(os.path.join(lib_path, name))) + for name in names: + if not os.path.exists(os.path.join('packaging/mapnik/lib', name)): + os.symlink(os.path.join(lib_path, name), os.path.join('packaging/mapnik/lib', name)) + input_plugin_path = check_output([mapnik_config, '--input-plugins']) + if not os.path.exists('packaging/mapnik/lib/mapnik/input'): + os.symlink(input_plugin_path, 'packaging/mapnik/lib/mapnik/input') + f_paths.write("mapniklibpath = os.path.join(os.path.dirname(__file__), 'lib')\n") + f_paths.write("inputpluginspath = os.path.join(os.path.dirname(__file__), 'lib/mapnik/input')\n") + f_paths.write("fontscollectionpath = os.path.join(os.path.dirname(__file__), 'lib/mapnik/fonts')\n") + +f_paths.write("__all__ = [mapniklibpath,inputpluginspath,fontscollectionpath]\n") +f_paths.close() + +extra_comp_args = check_output([mapnik_config, '--cflags']).split(' ') +extra_comp_args = list(filter(lambda arg: arg != "-fvisibility=hidden", extra_comp_args)) if sys.platform == 'darwin': - extra_comp_args.append('-mmacosx-version-min=10.8') - linkflags.append('-mmacosx-version-min=10.8') + pass else: - linkflags.append('-lrt') - linkflags.append('-Wl,-z,origin') - linkflags.append('-Wl,-rpath=$ORIGIN') - -if os.environ.get("CC",False) == False: - os.environ["CC"] = subprocess.check_output([mapnik_config, '--cxx']).rstrip('\n') -if os.environ.get("CXX",False) == False: - os.environ["CXX"] = subprocess.check_output([mapnik_config, '--cxx']).rstrip('\n') + linkflags.append('-lrt') + linkflags.append('-Wl,-z,origin') + linkflags.append('-Wl,-rpath=$ORIGIN/lib') + + +ext_modules = [ + Pybind11Extension( + 
"mapnik._mapnik", + [ + "src/mapnik_python.cpp", + "src/mapnik_layer.cpp", + "src/mapnik_query.cpp", + "src/mapnik_map.cpp", + "src/mapnik_color.cpp", + "src/mapnik_composite_modes.cpp", + "src/mapnik_coord.cpp", + "src/mapnik_envelope.cpp", + "src/mapnik_expression.cpp", + "src/mapnik_datasource.cpp", + "src/mapnik_datasource_cache.cpp", + "src/mapnik_gamma_method.cpp", + "src/mapnik_geometry.cpp", + "src/mapnik_feature.cpp", + "src/mapnik_featureset.cpp", + "src/mapnik_font_engine.cpp", + "src/mapnik_fontset.cpp", + "src/mapnik_grid.cpp", + "src/mapnik_grid_view.cpp", + "src/mapnik_image.cpp", + "src/mapnik_image_view.cpp", + "src/mapnik_projection.cpp", + "src/mapnik_proj_transform.cpp", + "src/mapnik_rule.cpp", + "src/mapnik_symbolizer.cpp", + "src/mapnik_debug_symbolizer.cpp", + "src/mapnik_markers_symbolizer.cpp", + "src/mapnik_polygon_symbolizer.cpp", + "src/mapnik_polygon_pattern_symbolizer.cpp", + "src/mapnik_line_symbolizer.cpp", + "src/mapnik_line_pattern_symbolizer.cpp", + "src/mapnik_point_symbolizer.cpp", + "src/mapnik_raster_symbolizer.cpp", + "src/mapnik_scaling_method.cpp", + "src/mapnik_style.cpp", + "src/mapnik_logger.cpp", + "src/mapnik_placement_finder.cpp", + "src/mapnik_text_symbolizer.cpp", + "src/mapnik_palette.cpp", + "src/mapnik_parameters.cpp", + "src/python_grid_utils.cpp", + "src/mapnik_raster_colorizer.cpp", + "src/mapnik_label_collision_detector.cpp", + "src/mapnik_dot_symbolizer.cpp", + "src/mapnik_building_symbolizer.cpp", + "src/mapnik_shield_symbolizer.cpp", + "src/mapnik_group_symbolizer.cpp" + ], + extra_compile_args=extra_comp_args, + extra_link_args=linkflags, + ) +] + +if os.environ.get("CC", False) == False: + os.environ["CC"] = check_output([mapnik_config, '--cxx']) +if os.environ.get("CXX", False) == False: + os.environ["CXX"] = check_output([mapnik_config, '--cxx']) setup( - name = "mapnik", - version = "0.1", - packages = ['mapnik'], - author = "Blake Thompson", - author_email = "flippmoke@gmail.com", - description = "Python bindings for Mapnik", - license = "GNU LESSER GENERAL PUBLIC LICENSE", - keywords = "mapnik mapbox mapping carteography", - url = "http://mapnik.org/", - tests_require = [ - 'nose', - ], - package_data = { - 'mapnik': ['libmapnik.*', 'plugins/*/*'], - }, - test_suite = 'nose.collector', - ext_modules = [ - Extension('mapnik._mapnik', [ - 'src/mapnik_color.cpp', - 'src/mapnik_coord.cpp', - 'src/mapnik_datasource.cpp', - 'src/mapnik_datasource_cache.cpp', - 'src/mapnik_envelope.cpp', - 'src/mapnik_expression.cpp', - 'src/mapnik_feature.cpp', - 'src/mapnik_featureset.cpp', - 'src/mapnik_font_engine.cpp', - 'src/mapnik_fontset.cpp', - 'src/mapnik_gamma_method.cpp', - 'src/mapnik_geometry.cpp', - 'src/mapnik_grid.cpp', - 'src/mapnik_grid_view.cpp', - 'src/mapnik_image.cpp', - 'src/mapnik_image_view.cpp', - 'src/mapnik_label_collision_detector.cpp', - 'src/mapnik_layer.cpp', - 'src/mapnik_logger.cpp', - 'src/mapnik_map.cpp', - 'src/mapnik_palette.cpp', - 'src/mapnik_parameters.cpp', - 'src/mapnik_proj_transform.cpp', - 'src/mapnik_projection.cpp', - 'src/mapnik_python.cpp', - 'src/mapnik_query.cpp', - 'src/mapnik_raster_colorizer.cpp', - 'src/mapnik_rule.cpp', - 'src/mapnik_scaling_method.cpp', - 'src/mapnik_style.cpp', - 'src/mapnik_svg_generator_grammar.cpp', - 'src/mapnik_symbolizer.cpp', - 'src/mapnik_text_placement.cpp', - 'src/mapnik_view_transform.cpp', - 'src/python_grid_utils.cpp', - ], - language='c++', - libraries = [ - 'mapnik', - 'mapnik-wkt', - 'mapnik-json', - boost_python_lib, - boost_thread_lib, - 
boost_system_lib - ], - extra_compile_args = extra_comp_args, - extra_link_args = linkflags, - ) - ] + name="mapnik", + include_package_data=True, + packages=find_namespace_packages(where="packaging"), + package_dir={"": "packaging"}, + package_data={ + "mapnik.include": ["*.hpp"], + "mapnik.bin": ["*"], + "mapnik.lib": ["libmapnik*"], + "mapnik.lib.mapnik.fonts":["*"], + "mapnik.lib.mapnik.input":["*.input"] + }, + exclude_package_data={ + "mapnik.bin": ["mapnik-config"], + "mapnik.lib": ["*.a"] + }, + ext_modules=ext_modules, + cmdclass={"build_ext": build_ext}, ) diff --git a/src/create_datasource.hpp b/src/create_datasource.hpp new file mode 100644 index 000000000..b6b5bfc0d --- /dev/null +++ b/src/create_datasource.hpp @@ -0,0 +1,68 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +#ifndef MAPNIK_CREATE_DATASOURCE_HPP +#define MAPNIK_CREATE_DATASOURCE_HPP + +// mapnik +#include +#include +//pybind11 +#include +#include +#include + +namespace py = pybind11; + +inline std::shared_ptr create_datasource(py::kwargs const& kwargs) +{ + mapnik::parameters params; + for (auto param : kwargs) + { + std::string key = std::string(py::str(param.first)); + py::handle handle = param.second; + if (py::isinstance(handle)) + { + params[key] = handle.cast(); + } + else if (py::isinstance(handle)) + { + params[key] = handle.cast(); + } + else if (py::isinstance(handle)) + { + params[key] = handle.cast(); + } + else if (py::isinstance(handle)) + { + params[key] = handle.cast(); + } + else + { + params[key] = py::str(handle).cast(); + } + } + return mapnik::datasource_cache::instance().create(params); +} + + +#endif //MAPNIK_CREATE_DATASOURCE_HPP diff --git a/src/mapnik_building_symbolizer.cpp b/src/mapnik_building_symbolizer.cpp new file mode 100644 index 000000000..45c91c42a --- /dev/null +++ b/src/mapnik_building_symbolizer.cpp @@ -0,0 +1,59 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
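`create_datasource()` above turns Python keyword arguments into `mapnik::parameters` (bool, int, float and str pass through; anything else is stringified via `py::str`). A rough sketch of how that surfaces in Python, assuming the csv input plugin is registered; the plugin name and its `inline`/`strict` parameters are illustrative and not defined by this patch.

```python
import mapnik

# every keyword becomes a datasource parameter; 'type' selects the input plugin
ds = mapnik.Datasource(type='csv', inline='x,y,name\n0,0,hello', strict=True)
print(ds.describe())   # dict with 'type', 'name', 'encoding' plus plugin extras
```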
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include + +namespace py = pybind11; + +void export_building_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::building_symbolizer; + + py::class_(m, "BuildingSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + .def_property("fill", + &get_property, + &set_color_property, + "Fill - mapnik.Color, CSS color string or a valid mapnik.Expression") + + .def_property("fill_opacity", + &get_property, + &set_double_property, + "Fill opacity - [0-1] or a valid mapnik.Expression") + + .def_property("height", + &get_property, + &set_double_property, + "Height - a numeric value or a valid mapnik.Expression") + ; + +} diff --git a/src/mapnik_color.cpp b/src/mapnik_color.cpp index 4ab765e0b..bf744be36 100644 --- a/src/mapnik_color.cpp +++ b/src/mapnik_color.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,101 +20,78 @@ * *****************************************************************************/ -#include -#include "boost_std_shared_shim.hpp" - -// boost -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#pragma GCC diagnostic pop - //mapnik +#include #include +//pybind11 +#include +#include - +namespace py = pybind11; using mapnik::color; -struct color_pickle_suite : boost::python::pickle_suite +void export_color (py::module const& m) { - static boost::python::tuple - getinitargs(const color& c) - { - using namespace boost::python; - return boost::python::make_tuple(c.red(),c.green(),c.blue(),c.alpha()); - } -}; + py::class_(m, "Color") + .def(py::init(), + "Creates a new color from its RGB components\n" + "and an alpha value.\n" + "All values between 0 and 255.\n", + py::arg("r"), py::arg("g"), py::arg("b"), py::arg("a")) + .def(py::init(), + "Creates a new color from its RGB components\n" + "and an alpha value.\n" + "All values between 0 and 255.\n", + py::arg("r"), py::arg("g"), py::arg("b"), py::arg("a"), py::arg("premultiplied")) + .def(py::init(), + "Creates a new color from its RGB components.\n" + "All values between 0 and 255.\n", + py::arg("r"), py::arg("g"), py::arg("b")) + .def(py::init(), + "Creates a new color from an unsigned integer.\n" + "All values between 0 and 2^32-1\n", + py::arg("val")) + .def(py::init(), + "Creates a new color from an unsigned integer.\n" + "All values between 0 and 2^32-1\n", + py::arg("val"), py::arg("premultiplied")) -void export_color () -{ - using namespace boost::python; - class_("Color", init( - ( arg("r"), arg("g"), arg("b"), arg("a") ), - "Creates a new color from its RGB components\n" - "and an alpha value.\n" - "All values between 0 and 255.\n") - ) - .def(init( - ( arg("r"), arg("g"), arg("b"), arg("a"), 
arg("premultiplied") ), - "Creates a new color from its RGB components\n" - "and an alpha value.\n" - "All values between 0 and 255.\n") - ) - .def(init( - ( arg("r"), arg("g"), arg("b") ), - "Creates a new color from its RGB components.\n" - "All values between 0 and 255.\n") - ) - .def(init( - ( arg("val") ), - "Creates a new color from an unsigned integer.\n" - "All values between 0 and 2^32-1\n") - ) - .def(init( - ( arg("val"), arg("premultiplied") ), - "Creates a new color from an unsigned integer.\n" - "All values between 0 and 2^32-1\n") - ) - .def(init( - ( arg("color_string") ), - "Creates a new color from its CSS string representation.\n" - "The string may be a CSS color name (e.g. 'blue')\n" - "or a hex color string (e.g. '#0000ff').\n") - ) - .def(init( - ( arg("color_string"), arg("premultiplied") ), - "Creates a new color from its CSS string representation.\n" - "The string may be a CSS color name (e.g. 'blue')\n" - "or a hex color string (e.g. '#0000ff').\n") - ) - .add_property("r", + .def(py::init(), + "Creates a new color from its CSS string representation.\n" + "The string may be a CSS color name (e.g. 'blue')\n" + "or a hex color string (e.g. '#0000ff').\n", + py::arg("color_string")) + + .def(py::init(), + "Creates a new color from its CSS string representation.\n" + "The string may be a CSS color name (e.g. 'blue')\n" + "or a hex color string (e.g. '#0000ff').\n", + py::arg("color_string"), py::arg("premultiplied")) + + .def_property("r", &color::red, &color::set_red, "Gets or sets the red component.\n" "The value is between 0 and 255.\n") - .add_property("g", + .def_property("g", &color::green, &color::set_green, "Gets or sets the green component.\n" "The value is between 0 and 255.\n") - .add_property("b", + .def_property("b", &color::blue, &color::set_blue, "Gets or sets the blue component.\n" "The value is between 0 and 255.\n") - .add_property("a", + .def_property("a", &color::alpha, &color::set_alpha, "Gets or sets the alpha component.\n" "The value is between 0 and 255.\n") - .def(self == self) - .def(self != self) - .def_pickle(color_pickle_suite()) + .def(py::self == py::self) + .def(py::self != py::self) .def("__str__",&color::to_string) + .def("__repr__",&color::to_string) .def("set_premultiplied",&color::set_premultiplied) .def("get_premultiplied",&color::get_premultiplied) .def("premultiply",&color::premultiply) @@ -127,5 +104,18 @@ void export_color () ">>> c = Color('blue')\n" ">>> c.to_hex_string()\n" "'#0000ff'\n") + .def(py::pickle( + [](color & c) { + return py::make_tuple(c.red(), c.green(), c.blue(), c.alpha()); + }, + [](py::tuple t) { + if (t.size() != 4) + throw std::runtime_error("Invalid state"); + color c{t[0].cast(), + t[1].cast(), + t[2].cast(), + t[3].cast()}; + return c; + })) ; } diff --git a/src/mapnik_composite_modes.cpp b/src/mapnik_composite_modes.cpp new file mode 100644 index 000000000..65494ddf1 --- /dev/null +++ b/src/mapnik_composite_modes.cpp @@ -0,0 +1,74 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +//pybind11 +#include +#include + +namespace py = pybind11; + +void export_composite_modes(py::module const& m) +{ + // NOTE: must match list in include/mapnik/image_compositing.hpp + py::native_enum(m, "CompositeOp", "enum.Enum") + .value("clear", mapnik::clear) + .value("src", mapnik::src) + .value("dst", mapnik::dst) + .value("src_over", mapnik::src_over) + .value("dst_over", mapnik::dst_over) + .value("src_in", mapnik::src_in) + .value("dst_in", mapnik::dst_in) + .value("src_out", mapnik::src_out) + .value("dst_out", mapnik::dst_out) + .value("src_atop", mapnik::src_atop) + .value("dst_atop", mapnik::dst_atop) + .value("xor", mapnik::_xor) + .value("plus", mapnik::plus) + .value("minus", mapnik::minus) + .value("multiply", mapnik::multiply) + .value("screen", mapnik::screen) + .value("overlay", mapnik::overlay) + .value("darken", mapnik::darken) + .value("lighten", mapnik::lighten) + .value("color_dodge", mapnik::color_dodge) + .value("color_burn", mapnik::color_burn) + .value("hard_light", mapnik::hard_light) + .value("soft_light", mapnik::soft_light) + .value("difference", mapnik::difference) + .value("exclusion", mapnik::exclusion) + .value("contrast", mapnik::contrast) + .value("invert", mapnik::invert) + .value("grain_merge", mapnik::grain_merge) + .value("grain_extract", mapnik::grain_extract) + .value("hue", mapnik::hue) + .value("saturation", mapnik::saturation) + .value("color", mapnik::_color) + .value("value", mapnik::_value) + .value("linear_dodge", mapnik::linear_dodge) + .value("linear_burn", mapnik::linear_burn) + .value("divide", mapnik::divide) + .finalize() + ; +} diff --git a/src/mapnik_coord.cpp b/src/mapnik_coord.cpp index 13b89611a..93249c34c 100644 --- a/src/mapnik_coord.cpp +++ b/src/mapnik_coord.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -19,56 +19,46 @@ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * *****************************************************************************/ -#include -#include "boost_std_shared_shim.hpp" - -// boost -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#pragma GCC diagnostic pop - // mapnik +#include #include +//pybind11 +#include +#include +namespace py = pybind11; using mapnik::coord; -struct coord_pickle_suite : boost::python::pickle_suite -{ - static boost::python::tuple - getinitargs(const coord& c) - { - using namespace boost::python; - return boost::python::make_tuple(c.x,c.y); - } -}; - -void export_coord() +void export_coord(py::module const& 
m) { - using namespace boost::python; - class_ >("Coord",init( - // class docstring is in mapnik/__init__.py, class _Coord - (arg("x"), arg("y")), - "Constructs a new point with the given coordinates.\n") - ) - .def_pickle(coord_pickle_suite()) + py::class_ >(m, "Coord") + .def(py::init(), + // class docstring is in mapnik/__init__.py, class _Coord + "Constructs a new object with the given coordinates.\n", + py::arg("x"), py::arg("y")) .def_readwrite("x", &coord::x, "Gets or sets the x/lon coordinate of the point.\n") .def_readwrite("y", &coord::y, "Gets or sets the y/lat coordinate of the point.\n") - .def(self == self) // __eq__ - .def(self + self) // __add__ - .def(self + float()) - .def(float() + self) - .def(self - self) // __sub__ - .def(self - float()) - .def(self * float()) //__mult__ - .def(float() * self) - .def(self / float()) // __div__ + .def(py::self == py::self) // __eq__ + .def(py::self + py::self) //__add__ + .def(py::self + float()) + .def(float() + py::self) + .def(py::self - py::self) //__sub__ + .def(py::self - float()) + .def(py::self * float()) //__mult__ + .def(float() * py::self) + .def(py::self / float()) // __div__ + .def(py::pickle( + [](coord & c) { + return py::make_tuple(c.x, c.y); + }, + [](py::tuple t) { + if (t.size() != 2) + throw std::runtime_error("Invalid state"); + coord c{t[0].cast(),t[1].cast()}; + return c; + })) ; } diff --git a/src/mapnik_datasource.cpp b/src/mapnik_datasource.cpp index 41cd79086..8d614a28c 100644 --- a/src/mapnik_datasource.cpp +++ b/src/mapnik_datasource.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,31 +20,22 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#pragma GCC diagnostic pop - -// stl -#include - // mapnik -#include +#include +#include #include #include #include #include - +#include "mapnik_value_converter.hpp" +#include "create_datasource.hpp" +// stl +#include +//pybind11 +#include +#include +#include +#include using mapnik::datasource; using mapnik::memory_datasource; @@ -52,57 +43,14 @@ using mapnik::layer_descriptor; using mapnik::attribute_descriptor; using mapnik::parameters; +namespace py = pybind11; + namespace { -//user-friendly wrapper that uses Python dictionary -using namespace boost::python; -std::shared_ptr create_datasource(dict const& d) -{ - mapnik::parameters params; - boost::python::list keys=d.keys(); - for (int i=0; i < len(keys); ++i) - { - std::string key = extract(keys[i]); - object obj = d[key]; - if (PyUnicode_Check(obj.ptr())) - { - PyObject* temp = PyUnicode_AsUTF8String(obj.ptr()); - if (temp) - { -#if PY_VERSION_HEX >= 0x03000000 - char* c_str = PyBytes_AsString(temp); -#else - char* c_str = PyString_AsString(temp); -#endif - params[key] = std::string(c_str); - Py_DecRef(temp); - } - continue; - } - extract ex0(obj); - extract ex1(obj); - extract ex2(obj); - if (ex0.check()) - { - params[key] = ex0(); - } - else if (ex1.check()) - { - params[key] = ex1(); - } - else 
if (ex2.check()) - { - params[key] = ex2(); - } - } - - return mapnik::datasource_cache::instance().create(params); -} - -boost::python::dict describe(std::shared_ptr const& ds) +py::dict describe(std::shared_ptr const& ds) { - boost::python::dict description; + py::dict description; mapnik::layer_descriptor ld = ds->get_descriptor(); description["type"] = ds->type(); description["name"] = ld.get_name(); @@ -110,14 +58,14 @@ boost::python::dict describe(std::shared_ptr const& ds) description["encoding"] = ld.get_encoding(); for (auto const& param : ld.get_extra_parameters()) { - description[param.first] = param.second; + description[py::str(param.first)] = mapnik_param_to_python::convert(param.second); } return description; } -boost::python::list fields(std::shared_ptr const& ds) +py::list fields(std::shared_ptr const& ds) { - boost::python::list flds; + py::list flds; if (ds) { layer_descriptor ld = ds->get_descriptor(); @@ -131,9 +79,9 @@ boost::python::list fields(std::shared_ptr const& ds) } return flds; } -boost::python::list field_types(std::shared_ptr const& ds) +py::list field_types(std::shared_ptr const& ds) { - boost::python::list fld_types; + py::list fld_types; if (ds) { layer_descriptor ld = ds->get_descriptor(); @@ -144,75 +92,99 @@ boost::python::list field_types(std::shared_ptr const& ds) { unsigned type = it->get_type(); if (type == mapnik::Integer) - // this crashes, so send back strings instead - //fld_types.append(boost::python::object(boost::python::handle<>(&PyInt_Type))); - fld_types.append(boost::python::str("int")); + fld_types.append(py::str("int")); else if (type == mapnik::Float) - fld_types.append(boost::python::str("float")); + fld_types.append(py::str("float")); else if (type == mapnik::Double) - fld_types.append(boost::python::str("float")); + fld_types.append(py::str("float")); else if (type == mapnik::String) - fld_types.append(boost::python::str("str")); + fld_types.append(py::str("str")); else if (type == mapnik::Boolean) - fld_types.append(boost::python::str("bool")); + fld_types.append(py::str("bool")); else if (type == mapnik::Geometry) - fld_types.append(boost::python::str("geometry")); + fld_types.append(py::str("geometry")); else if (type == mapnik::Object) - fld_types.append(boost::python::str("object")); + fld_types.append(py::str("object")); else - fld_types.append(boost::python::str("unknown")); + fld_types.append(py::str("unknown")); } } return fld_types; -}} +} + +py::dict parameters_impl(std::shared_ptr const& ds) +{ + auto const params = ds->params(); + py::dict d; + for (auto kv : params) + { + d[py::str(kv.first)] = mapnik_param_to_python::convert(kv.second); + } + return d; +} -mapnik::parameters const& (mapnik::datasource::*params_const)() const = &mapnik::datasource::params; +} // namespace -void export_datasource() +void export_datasource(py::module& m) { - using namespace boost::python; - - enum_("DataType") + py::native_enum(m, "DataType", "enum.Enum") .value("Vector",mapnik::datasource::Vector) .value("Raster",mapnik::datasource::Raster) + .finalize() ; - enum_("DataGeometryType") + py::native_enum(m, "DataGeometryType", "enum.Enum") .value("Point",mapnik::datasource_geometry_t::Point) .value("LineString",mapnik::datasource_geometry_t::LineString) .value("Polygon",mapnik::datasource_geometry_t::Polygon) .value("Collection",mapnik::datasource_geometry_t::Collection) + .finalize() ; - class_, - boost::noncopyable>("Datasource",no_init) - .def("type",&datasource::type) - .def("geometry_type",&datasource::get_geometry_type) - 
.def("describe",&describe) - .def("envelope",&datasource::envelope) - .def("features",&datasource::features) - .def("fields",&fields) - .def("field_types",&field_types) - .def("features_at_point",&datasource::features_at_point, (arg("coord"),arg("tolerance")=0)) - .def("params",make_function(params_const,return_value_policy()), + py::class_> (m, "Datasource") + .def(py::init([] (py::kwargs const& kwargs) { return create_datasource(kwargs);})) + .def("type", &datasource::type) + .def("geometry_type", &datasource::get_geometry_type) + .def("describe", &describe) + .def("envelope", &datasource::envelope) + .def("features", &datasource::features) + .def("fields" ,&fields) + .def("field_types", &field_types) + .def("features_at_point", &datasource::features_at_point, py::arg("coord"), py::arg("tolerance") = 0) + .def("parameters", ¶meters_impl, "The configuration parameters of the data source. " "These vary depending on the type of data source.") - .def(self == self) + .def(py::self == py::self) + .def("__iter__", + [](datasource const& ds) { + mapnik::query q(ds.envelope()); + layer_descriptor ld = ds.get_descriptor(); + std::vector const& desc_ar = ld.get_descriptors(); + for (auto const& desc : desc_ar) + { + q.add_property_name(desc.get_name()); + } + return ds.features(q); + }, + py::keep_alive<0, 1>()) ; - def("CreateDatasource",&create_datasource); + m.def("CreateDatasource",&create_datasource); - class_, std::shared_ptr, - boost::noncopyable>("MemoryDatasourceBase", init()) - .def("add_feature",&memory_datasource::push, + py::class_> + (m, "MemoryDatasource") + .def(py::init([]() { + mapnik::parameters p; + p.insert(std::make_pair("type","memory")); + return std::make_shared(p);})) + .def("add_feature", &memory_datasource::push, "Adds a Feature:\n" ">>> ms = MemoryDatasource()\n" - ">>> feature = Feature(1)\n" - ">>> ms.add_feature(Feature(1))\n") - .def("num_features",&memory_datasource::size) + ">>> feature = Feature(Context(),1)\n" + ">>> ms.add_feature(f)\n") + .def("num_features", &memory_datasource::size) ; - implicitly_convertible,std::shared_ptr >(); + py::implicitly_convertible(); } diff --git a/src/mapnik_datasource_cache.cpp b/src/mapnik_datasource_cache.cpp index 54399916e..82fde280f 100644 --- a/src/mapnik_datasource_cache.cpp +++ b/src/mapnik_datasource_cache.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,86 +20,45 @@ * *****************************************************************************/ +// mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" -#include -#include -#pragma GCC diagnostic pop - -#include +#include #include #include #include +#include "create_datasource.hpp" +//pybind11 +#include +#include -namespace { +namespace py = pybind11; -using namespace boost::python; +namespace { -std::shared_ptr create_datasource(const dict& d) +bool register_datasources(std::string const& plugins_dir, bool recursive = false) { - mapnik::parameters params; - boost::python::list keys=d.keys(); - for (int i=0; i(keys[i]); - object obj = d[key]; - extract ex0(obj); - 
extract ex1(obj); - extract ex2(obj); - - if (ex0.check()) - { - params[key] = ex0(); - } - else if (ex1.check()) - { - params[key] = ex1(); - } - else if (ex2.check()) - { - params[key] = ex2(); - } - } - - return mapnik::datasource_cache::instance().create(params); + return mapnik::datasource_cache::instance().register_datasources(plugins_dir, recursive); } -void register_datasources(std::string const& path) +std::string plugin_directories() { - mapnik::datasource_cache::instance().register_datasources(path); + return mapnik::datasource_cache::instance().plugin_directories(); } std::vector plugin_names() { - return mapnik::datasource_cache::instance().plugin_names(); + return mapnik::datasource_cache::instance().plugin_names(); } -std::string plugin_directories() -{ - return mapnik::datasource_cache::instance().plugin_directories(); -} +} // namespace -} -void export_datasource_cache() +void export_datasource_cache(py::module const& m) { - using mapnik::datasource_cache; - class_("DatasourceCache",no_init) - .def("create",&create_datasource) - .staticmethod("create") - .def("register_datasources",®ister_datasources) - .staticmethod("register_datasources") - .def("plugin_names",&plugin_names) - .staticmethod("plugin_names") - .def("plugin_directories",&plugin_directories) - .staticmethod("plugin_directories") + py::class_>(m, "DatasourceCache") + .def_static("create",&create_datasource) + .def_static("register_datasources",®ister_datasources) + .def_static("plugin_names",&plugin_names) + .def_static("plugin_directories",&plugin_directories) ; } diff --git a/src/mapnik_debug_symbolizer.cpp b/src/mapnik_debug_symbolizer.cpp new file mode 100644 index 000000000..a2a3f063e --- /dev/null +++ b/src/mapnik_debug_symbolizer.cpp @@ -0,0 +1,56 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
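A sketch of the reworked datasource surface, limited to methods bound in the hunks above; the `Feature(Context(), 1)` construction mirrors the docstring of `MemoryDatasource.add_feature`, and the printed plugin list depends on the local installation.

```python
import mapnik

print(mapnik.DatasourceCache.plugin_directories())
print(mapnik.DatasourceCache.plugin_names())      # e.g. ['csv', 'gdal', 'shape', ...]

# MemoryDatasource() now builds its {'type': 'memory'} parameters internally
ms = mapnik.MemoryDatasource()
f = mapnik.Feature(mapnik.Context(), 1)
ms.add_feature(f)
assert ms.num_features() == 1
```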
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include +#include + +namespace py = pybind11; + +void export_debug_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::debug_symbolizer; + using mapnik::debug_symbolizer_mode_enum; + + py::native_enum(m, "debug_symbolizer_mode", "enum.Enum") + .value("COLLISION", debug_symbolizer_mode_enum::DEBUG_SYM_MODE_COLLISION) + .value("VERTEX", debug_symbolizer_mode_enum::DEBUG_SYM_MODE_VERTEX) + .finalize() + ; + + py::class_(m, "DebugSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + .def_property("mode", + &get, + &set_enum_property) + ; + +} diff --git a/src/mapnik_dot_symbolizer.cpp b/src/mapnik_dot_symbolizer.cpp new file mode 100644 index 000000000..d2702c792 --- /dev/null +++ b/src/mapnik_dot_symbolizer.cpp @@ -0,0 +1,66 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include + +namespace py = pybind11; + +void export_dot_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::dot_symbolizer; + + py::class_(m, "DotSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + .def_property("fill", + &get_property, + &set_color_property, + "Fill - mapnik.Color, CSS color string or a valid mapnik.Expression") + .def_property("opacity", + &get_property, + &set_double_property, + "Opacity - [0-1] or a valid mapnik.Expression") + .def_property("width", + &get_property, + &set_double_property, + "Width - a numeric value or a valid mapnik.Expression") + .def_property("height", + &get_property, + &set_double_property, + "Height - a numeric value or a valid mapnik.Expression") + .def_property("comp_op", + &get, + &set_enum_property, + "Composite mode (comp-op)") + + ; + +} diff --git a/src/mapnik_enumeration.hpp b/src/mapnik_enumeration.hpp deleted file mode 100644 index ce2266a6f..000000000 --- a/src/mapnik_enumeration.hpp +++ /dev/null @@ -1,88 +0,0 @@ -/***************************************************************************** - * - * This file is part of Mapnik (c++ mapping toolkit) - * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon - * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * - *****************************************************************************/ -#ifndef MAPNIK_PYTHON_BINDING_ENUMERATION_INCLUDED -#define MAPNIK_PYTHON_BINDING_ENUMERATION_INCLUDED - -#include // for registered -#include // for enum_ -#include // for implicitly_convertible -#include - -namespace mapnik { - -template -class enumeration_ : - public boost::python::enum_ -{ - // some short cuts - using base_type = boost::python::enum_; - using native_type = typename EnumWrapper::native_type; -public: - enumeration_() : - base_type( EnumWrapper::get_name().c_str() ) - { - init(); - } - enumeration_(const char * python_alias) : - base_type( python_alias ) - { - init(); - } - enumeration_(const char * python_alias, const char * doc) : - base_type( python_alias, doc ) - { - init(); - } - -private: - struct converter - { - static PyObject* convert(EnumWrapper const& v) - { - // Redirect conversion to a static method of our base class's - // base class. A free template converter will not work because - // the base_type::base typedef is protected. 
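An illustrative configuration of the new `DotSymbolizer` binding. The property names come from the hunk above; the `comp_op` assignment assumes the enum property setter accepts a `CompositeOp` member, which this diff does not show explicitly.

```python
import mapnik

sym = mapnik.DotSymbolizer()
sym.fill = mapnik.Color('red')     # also accepts a CSS string or an Expression
sym.opacity = 0.5
sym.width = 8.0
sym.height = 8.0
sym.comp_op = mapnik.CompositeOp.multiply   # assumption: setter takes a CompositeOp value
```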
- // Lets hope MSVC agrees that this is legal C++ - using namespace boost::python::converter; - return base_type::base::to_python( - registered::converters.m_class_object - , static_cast( v )); - - } - }; - - void init() { - boost::python::implicitly_convertible(); - boost::python::to_python_converter(); - - for (unsigned i = 0; i < EnumWrapper::MAX; ++i) - { - // Register the strings already defined for this enum. - base_type::value( EnumWrapper::get_string( i ), native_type( i ) ); - } - } - -}; - -} // end of namespace mapnik - -#endif // MAPNIK_PYTHON_BINDING_ENUMERATION_INCLUDED diff --git a/src/mapnik_envelope.cpp b/src/mapnik_envelope.cpp index 396405338..4af90c819 100644 --- a/src/mapnik_envelope.cpp +++ b/src/mapnik_envelope.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,35 +20,21 @@ * *****************************************************************************/ +// mapnik #include +#include +#include +//stl +#include +//pybind11 +#include +#include -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" -#include -#pragma GCC diagnostic pop - -// mapnik -#include -#include +namespace py = pybind11; using mapnik::coord; using mapnik::box2d; -struct envelope_pickle_suite : boost::python::pickle_suite -{ - static boost::python::tuple - getinitargs(const box2d& e) - { - using namespace boost::python; - return boost::python::make_tuple(e.minx(),e.miny(),e.maxx(),e.maxy()); - } -}; - box2d from_string(std::string const& s) { box2d bbox; @@ -98,36 +84,32 @@ void (box2d::*clip)(box2d const&) = &box2d::clip; // pad void (box2d::*pad)(double) = &box2d::pad; -// deepcopy -box2d box2d_deepcopy(box2d & obj, boost::python::dict const&) -{ - // FIXME::ignore memo for now - box2d result(obj); - return result; -} +// to string + -void export_envelope() +void export_envelope(py::module const& m) { - using namespace boost::python; - class_ >("Box2d", - // class docstring is in mapnik/__init__.py, class _Coord - init( - (arg("minx"),arg("miny"),arg("maxx"),arg("maxy")), - "Constructs a new envelope from the coordinates\n" - "of its lower left and upper right corner points.\n")) - .def(init<>("Equivalent to Box2d(0, 0, -1, -1).\n")) - .def(init&, const coord&>( - (arg("ll"),arg("ur")), - "Equivalent to Box2d(ll.x, ll.y, ur.x, ur.y).\n")) - .def("from_string",from_string) - .staticmethod("from_string") - .add_property("minx", &box2d::minx, + py::class_ >(m, "Box2d") + // class docstring is in mapnik/__init__.py, class _Coord + .def(py::init(), + "Constructs a new envelope from the coordinates\n" + "of its lower left and upper right corner points.\n", + py::arg("minx"),py::arg("miny"),py::arg("maxx"),py::arg("maxy")) + + .def(py::init<>(), "Equivalent to Box2d(INVALID).\n") + + .def(py::init const&, coord const&>(), + "Equivalent to Box2d(ll.x, ll.y, ur.x, ur.y).\n", + py::arg("ll"),py::arg("ur")) + + .def_static("from_string",from_string) + .def_property("minx", &box2d::minx, &box2d::set_minx, "X coordinate for the lower left corner") - .add_property("miny", &box2d::miny, + .def_property("miny", &box2d::miny, 
&box2d::set_miny, "Y coordinate for the lower left corner") - .add_property("maxx", &box2d::maxx, + .def_property("maxx", &box2d::maxx, &box2d::set_maxx, "X coordinate for the upper right corner") - .add_property("maxy", &box2d::maxy, + .def_property("maxy", &box2d::maxy, &box2d::set_maxy, "Y coordinate for the upper right corner") .def("center", &box2d::center, "Returns the coordinates of the center of the bounding box.\n" @@ -137,7 +119,6 @@ void export_envelope() ">>> e.center()\n" "Coord(50, 50)\n") .def("center", re_center_p1, - (arg("x"), arg("y")), "Moves the envelope so that the given coordinates become its new center.\n" "The width and the height are preserved.\n" "\n " @@ -149,10 +130,9 @@ void export_envelope() ">>> (e.width(), e.height())\n" "(100.0, 100.0)\n" ">>> e\n" - "Box2d(10.0, 10.0, 110.0, 110.0)\n" - ) + "Box2d(10.0, 10.0, 110.0, 110.0)\n", + py::arg("x"), py::arg("y")) .def("center", re_center_p2, - (arg("Coord")), "Moves the envelope so that the given coordinates become its new center.\n" "The width and the height are preserved.\n" "\n " @@ -164,10 +144,9 @@ void export_envelope() ">>> (e.width(), e.height())\n" "(100.0, 100.0)\n" ">>> e\n" - "Box2d(10.0, 10.0, 110.0, 110.0)\n" - ) + "Box2d(10.0, 10.0, 110.0, 110.0)\n", + py::arg("Coord")) .def("clip", clip, - (arg("other")), "Clip the envelope based on the bounds of another envelope.\n" "\n " "Example:\n" @@ -175,20 +154,18 @@ void export_envelope() ">>> c = Box2d(-50, -50, 50, 50)\n" ">>> e.clip(c)\n" ">>> e\n" - "Box2d(0.0,0.0,50.0,50.0\n" - ) + "Box2d(0.0,0.0,50.0,50.0\n", + py::arg("other")) .def("pad", pad, - (arg("padding")), "Pad the envelope based on a padding value.\n" "\n " "Example:\n" ">>> e = Box2d(0, 0, 100, 100)\n" ">>> e.pad(10)\n" ">>> e\n" - "Box2d(-10.0,-10.0,110.0,110.0\n" - ) + "Box2d(-10.0,-10.0,110.0,110.0\n", + py::arg("padding")) .def("width", width_p1, - (arg("new_width")), "Sets the width to new_width of the envelope preserving its center.\n" "\n " "Example:\n" @@ -197,13 +174,11 @@ void export_envelope() ">>> e.center()\n" "Coord(50.0,50.0)\n" ">>> e\n" - "Box2d(-10.0, 0.0, 110.0, 100.0)\n" - ) + "Box2d(-10.0, 0.0, 110.0, 100.0)\n", + py::arg("new_width")) .def("width", width_p2, - "Returns the width of this envelope.\n" - ) + "Returns the width of this envelope.\n") .def("height", height_p1, - (arg("new_height")), "Sets the height to new_height of the envelope preserving its center.\n" "\n " "Example:\n" @@ -212,59 +187,52 @@ void export_envelope() ">>> e.center()\n" "Coord(50.0,50.0)\n" ">>> e\n" - "Box2d(0.0, -10.0, 100.0, 110.0)\n" - ) + "Box2d(0.0, -10.0, 100.0, 110.0)\n", + py::arg("new_height")) .def("height", height_p2, - "Returns the height of this envelope.\n" - ) + "Returns the height of this envelope.\n") .def("expand_to_include",expand_to_include_p1, - (arg("x"),arg("y")), "Expands this envelope to include the point given by x and y.\n" "\n" "Example:\n", ">>> e = Box2d(0, 0, 100, 100)\n" ">>> e.expand_to_include(110, 110)\n" ">>> e\n" - "Box2d(0.0, 00.0, 110.0, 110.0)\n" - ) + "Box2d(0.0, 00.0, 110.0, 110.0)\n", + py::arg("x"),py::arg("y")) + .def("expand_to_include",expand_to_include_p2, - (arg("p")), - "Equivalent to expand_to_include(p.x, p.y)\n" - ) + "Equivalent to expand_to_include(p.x, p.y)\n", + py::arg("p")) + .def("expand_to_include",expand_to_include_p3, - (arg("other")), "Equivalent to:\n" " expand_to_include(other.minx, other.miny)\n" - " expand_to_include(other.maxx, other.maxy)\n" - ) + " expand_to_include(other.maxx, other.maxy)\n", + py::arg("other")) 
.def("contains",contains_p1, - (arg("x"),arg("y")), "Returns True iff this envelope contains the point\n" - "given by x and y.\n" - ) + "given by x and y.\n", + py::arg("x"),py::arg("y")) .def("contains",contains_p2, - (arg("p")), - "Equivalent to contains(p.x, p.y)\n" - ) + "Equivalent to contains(p.x, p.y)\n", + py::arg("p")) .def("contains",contains_p3, - (arg("other")), "Equivalent to:\n" - " contains(other.minx, other.miny) and contains(other.maxx, other.maxy)\n" - ) + " contains(other.minx, other.miny) and contains(other.maxx, other.maxy)\n", + py::arg("other")) .def("intersects",intersects_p1, - (arg("x"),arg("y")), "Returns True iff this envelope intersects the point\n" "given by x and y.\n" "\n" "Note: For points, intersection is equivalent\n" "to containment, i.e. the following holds:\n" - " e.contains(x, y) == e.intersects(x, y)\n" - ) + " e.contains(x, y) == e.intersects(x, y)\n", + py::arg("x"),py::arg("y")) .def("intersects",intersects_p2, - (arg("p")), - "Equivalent to contains(p.x, p.y)\n") + "Equivalent to contains(p.x, p.y)\n", + py::arg("p")) .def("intersects",intersects_p3, - (arg("other")), "Returns True iff this envelope intersects the other envelope,\n" "This relationship is symmetric." "\n" @@ -274,10 +242,9 @@ void export_envelope() ">>> e1.intersects(e2)\n" "True\n" ">>> e1.contains(e2)\n" - "False\n" - ) + "False\n", + py::arg("other")) .def("intersect",intersect, - (arg("other")), "Returns the overlap of this envelope and the other envelope\n" "as a new envelope.\n" "\n" @@ -285,18 +252,33 @@ void export_envelope() ">>> e1 = Box2d(0, 0, 100, 100)\n" ">>> e2 = Box2d(50, 50, 150, 150)\n" ">>> e1.intersect(e2)\n" - "Box2d(50.0, 50.0, 100.0, 100.0)\n" - ) - .def(self == self) // __eq__ - .def(self != self) // __neq__ - .def(self + self) // __add__ - .def(self * float()) // __mult__ - .def(float() * self) - .def(self / float()) // __div__ + "Box2d(50.0, 50.0, 100.0, 100.0)\n", + py::arg("other")) + .def(py::self == py::self) // __eq__ + .def(py::self != py::self) // __neq__ + .def(py::self + py::self) // __add__ + .def(py::self * float()) // __mult__ + .def(float() * py::self) + .def(py::self / float()) // __div__ .def("__getitem__",&box2d::operator[]) .def("valid",&box2d::valid) - .def_pickle(envelope_pickle_suite()) - .def("__deepcopy__", &box2d_deepcopy) + .def(py::pickle( + [](box2d const& box) { + return py::make_tuple(box.minx(), box.miny(), box.maxx(), box.maxy()); + }, + [](py::tuple t) { + if (t.size() != 4) + throw std::runtime_error("Invalid state"); + box2d box{t[0].cast(), + t[1].cast(), + t[2].cast(), + t[3].cast()}; + return box; + })) + .def("__repr__", + [](box2d const& box) { + return box.to_string(); + }) ; } diff --git a/src/mapnik_expression.cpp b/src/mapnik_expression.cpp index 60c4a44b1..687326976 100644 --- a/src/mapnik_expression.cpp +++ b/src/mapnik_expression.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,22 +20,10 @@ * *****************************************************************************/ +// mapnik #include #include "python_to_value.hpp" - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored 
"-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - -// mapnik +#include "mapnik_value_converter.hpp" #include #include #include @@ -44,11 +32,19 @@ #include #include +//pybind11 +#include +#include +#include + using mapnik::expression_ptr; using mapnik::parse_expression; using mapnik::to_expression_string; using mapnik::path_expression_ptr; +namespace py = pybind11; + +PYBIND11_MAKE_OPAQUE(mapnik::path_expression); // expression expression_ptr parse_expression_(std::string const& wkt) @@ -61,15 +57,17 @@ std::string expression_to_string_(mapnik::expr_node const& expr) return mapnik::to_expression_string(expr); } -mapnik::value expression_evaluate_(mapnik::expr_node const& expr, mapnik::feature_impl const& f, boost::python::dict const& d) +mapnik::value expression_evaluate_(mapnik::expr_node const& expr, mapnik::feature_impl const& f, py::dict const& d) { // will be auto-converted to proper python type by `mapnik_value_to_python` - return mapnik::util::apply_visitor(mapnik::evaluate(f,mapnik::dict2attr(d)),expr); + return mapnik::util::apply_visitor(mapnik::evaluate(f, mapnik::dict2attr(d)),expr); } -bool expression_evaluate_to_bool_(mapnik::expr_node const& expr, mapnik::feature_impl const& f, boost::python::dict const& d) +bool expression_evaluate_to_bool_(mapnik::expr_node const& expr, mapnik::feature_impl const& f, py::dict const& d) { - return mapnik::util::apply_visitor(mapnik::evaluate(f,mapnik::dict2attr(d)),expr).to_bool(); + return mapnik::util::apply_visitor(mapnik::evaluate(f, mapnik::dict2attr(d)),expr).to_bool(); } // path expression @@ -88,25 +86,19 @@ std::string path_evaluate_(mapnik::path_expression const& expr, mapnik::feature_ return mapnik::path_processor_type::evaluate(expr, f); } -void export_expression() +void export_expression(py::module const& m) { - using namespace boost::python; - class_("Expression", - "TODO" - "",no_init) - .def("evaluate", &expression_evaluate_,(arg("feature"),arg("variables")=boost::python::dict())) - .def("to_bool", &expression_evaluate_to_bool_,(arg("feature"),arg("variables")=boost::python::dict())) - .def("__str__",&expression_to_string_); + py::class_(m, "Expression") + .def(py::init([] (std::string const& wkt) { return parse_expression_(wkt);})) + .def("evaluate", &expression_evaluate_, py::arg("feature"), py::arg("variables") = py::dict()) + .def("to_bool", &expression_evaluate_to_bool_, py::arg("feature"), py::arg("variables") = py::dict()) + .def("__str__", &expression_to_string_); ; - def("Expression",&parse_expression_,(arg("expr")),"Expression string"); - - class_("PathExpression", - "TODO" - "",no_init) - .def("evaluate", &path_evaluate_) // note: "pass" is a reserved word in Python + py::class_(m, "PathExpression") + .def(py::init([] (std::string const& wkt) { return parse_path_(wkt);})) + .def("evaluate", &path_evaluate_) .def("__str__",&path_to_string_); ; - def("PathExpression",&parse_path_,(arg("expr")),"PathExpression string"); } diff --git a/src/mapnik_feature.cpp b/src/mapnik_feature.cpp index 16d9d1750..b532546fb 100644 --- a/src/mapnik_feature.cpp +++ b/src/mapnik_feature.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,37 +20,26 @@ * 
*****************************************************************************/ +//mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#include -#include -#include -#pragma GCC diagnostic pop - -// mapnik -#include +#include +#include #include #include #include #include #include -//#include #include -#include +#include +#include "mapnik_value_converter.hpp" // stl #include +//pybind11 +#include +#include + +namespace py = pybind11; namespace { @@ -58,6 +47,7 @@ using mapnik::geometry_utils; using mapnik::context_type; using mapnik::context_ptr; using mapnik::feature_kv_iterator; +using mapnik::value; mapnik::feature_ptr from_geojson_impl(std::string const& json, mapnik::context_ptr const& ctx) { @@ -72,19 +62,19 @@ mapnik::feature_ptr from_geojson_impl(std::string const& json, mapnik::context_p std::string feature_to_geojson(mapnik::feature_impl const& feature) { std::string json; - if (!mapnik::json::to_geojson(json,feature)) + if (!mapnik::util::to_geojson(json,feature)) { throw std::runtime_error("Failed to generate GeoJSON"); } return json; } -mapnik::value __getitem__(mapnik::feature_impl const& feature, std::string const& name) +mapnik::value __getitem__(mapnik::feature_impl const& feature, std::string const& name) { return feature.get(name); } -mapnik::value __getitem2__(mapnik::feature_impl const& feature, std::size_t index) +mapnik::value __getitem2__(mapnik::feature_impl const& feature, std::size_t index) { return feature.get(index); } @@ -94,145 +84,49 @@ void __setitem__(mapnik::feature_impl & feature, std::string const& name, mapnik feature.put_new(name,val); } -boost::python::dict attributes(mapnik::feature_impl const& f) +py::dict attributes(mapnik::feature_impl const& feature) { - boost::python::dict attributes; - feature_kv_iterator itr = f.begin(); - feature_kv_iterator end = f.end(); - - for ( ;itr!=end; ++itr) + auto attributes = py::dict(); + for (auto const& kv : feature) { - attributes[std::get<0>(*itr)] = std::get<1>(*itr); + attributes[std::get<0>(kv).c_str()] = std::get<1>(kv); } - return attributes; } } // end anonymous namespace -struct unicode_string_from_python_str -{ - unicode_string_from_python_str() - { - boost::python::converter::registry::push_back( - &convertible, - &construct, - boost::python::type_id()); - } - - static void* convertible(PyObject* obj_ptr) - { - if (!( -#if PY_VERSION_HEX >= 0x03000000 - PyBytes_Check(obj_ptr) -#else - PyString_Check(obj_ptr) -#endif - || PyUnicode_Check(obj_ptr))) - return 0; - return obj_ptr; - } - - static void construct( - PyObject* obj_ptr, - boost::python::converter::rvalue_from_python_stage1_data* data) - { - char * value=0; - if (PyUnicode_Check(obj_ptr)) { - PyObject *encoded = PyUnicode_AsEncodedString(obj_ptr, "utf8", "replace"); - if (encoded) { -#if PY_VERSION_HEX >= 0x03000000 - value = PyBytes_AsString(encoded); -#else - value = PyString_AsString(encoded); -#endif - Py_DecRef(encoded); - } - } else { -#if PY_VERSION_HEX >= 0x03000000 - value = PyBytes_AsString(obj_ptr); -#else - value = PyString_AsString(obj_ptr); -#endif - } - if (value == 0) boost::python::throw_error_already_set(); - void* storage = ( - (boost::python::converter::rvalue_from_python_storage*) - data)->storage.bytes; - new (storage) 
mapnik::value_unicode_string(value); - data->convertible = storage; - } -}; - - -struct value_null_from_python +void export_feature(py::module const& m) { - value_null_from_python() - { - boost::python::converter::registry::push_back( - &convertible, - &construct, - boost::python::type_id()); - } - - static void* convertible(PyObject* obj_ptr) - { - if (obj_ptr == Py_None) return obj_ptr; - return 0; - } - - static void construct( - PyObject* obj_ptr, - boost::python::converter::rvalue_from_python_stage1_data* data) - { - if (obj_ptr != Py_None) boost::python::throw_error_already_set(); - void* storage = ( - (boost::python::converter::rvalue_from_python_storage*) - data)->storage.bytes; - new (storage) mapnik::value_null(); - data->convertible = storage; - } -}; - -void export_feature() -{ - using namespace boost::python; - - // Python to mapnik::value converters - // NOTE: order matters here. For example value_null must be listed before - // bool otherwise Py_None will be interpreted as bool (false) - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - - // http://misspent.wordpress.com/2009/09/27/how-to-write-boost-python-converters/ - unicode_string_from_python_str(); - value_null_from_python(); - - class_ - ("Context",init<>("Default ctor.")) + py::class_(m, "Context") + .def(py::init<>(), "Default constructor") .def("push", &context_type::push) ; - class_, - boost::noncopyable>("Feature",init("Default ctor.")) + py::class_>(m, "Feature") + .def(py::init(), "Default constructor") .def("id",&mapnik::feature_impl::id) - .add_property("geometry", - make_function(&mapnik::feature_impl::get_geometry,return_value_policy()), - &mapnik::feature_impl::set_geometry_copy) + //.def_property("id",&mapnik::feature_impl::id, &mapnik::feature_impl::set_id) + .def_property("geometry", + py::cpp_function((mapnik::geometry::geometry& (mapnik::feature_impl::*)()) + &mapnik::feature_impl::get_geometry, py::return_value_policy::reference_internal), + py::cpp_function(&mapnik::feature_impl::set_geometry_copy)) .def("envelope", &mapnik::feature_impl::envelope) .def("has_key", &mapnik::feature_impl::has_key) - .add_property("attributes",&attributes) - .def("__setitem__",&__setitem__) - .def("__contains__",&__getitem__) - .def("__getitem__",&__getitem__) - .def("__getitem__",&__getitem2__) + .def_property_readonly("attributes", [] (mapnik::feature_impl const& f) { return attributes(f) ;}) + .def("__setitem__", &__setitem__) + .def("__contains__" ,&__getitem__) + .def("__getitem__", &__getitem__) + .def("__getitem__", &__getitem2__) .def("__len__", &mapnik::feature_impl::size) - .def("context",&mapnik::feature_impl::context) - .def("to_geojson",&feature_to_geojson) - .def("from_geojson",from_geojson_impl) - .staticmethod("from_geojson") + .def("context", &mapnik::feature_impl::context) + .def("to_json", &feature_to_geojson) + .def("to_geojson", &feature_to_geojson) + .def_property_readonly("__geo_interface__", + [] (mapnik::feature_impl const& f) { + py::object json = py::module_::import("json"); + py::object loads = json.attr("loads"); + return loads(feature_to_geojson(f));}) + .def_static("from_geojson", from_geojson_impl) ; } diff --git a/src/mapnik_featureset.cpp b/src/mapnik_featureset.cpp index 8e9ddf6f5..b59b89657 100644 --- a/src/mapnik_featureset.cpp +++ b/src/mapnik_featureset.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * 
Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,75 +20,35 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include -namespace { -using namespace boost::python; +//pybind11 +#include +#include +#include -inline list features(mapnik::featureset_ptr const& itr) -{ - list l; - while (true) - { - mapnik::feature_ptr fp = itr->next(); - if (!fp) - { - break; - } - l.append(fp); - } - return l; -} +namespace py = pybind11; -inline object pass_through(object const& o) { return o; } +namespace { inline mapnik::feature_ptr next(mapnik::featureset_ptr const& itr) { mapnik::feature_ptr f = itr->next(); - if (!f) - { - PyErr_SetString(PyExc_StopIteration, "No more features."); - boost::python::throw_error_already_set(); - } - + if (!f) throw py::stop_iteration(); return f; } } -void export_featureset() +void export_featureset(py::module const& m) { - using namespace boost::python; - class_, - boost::noncopyable>("Featureset",no_init) - .def("__iter__",pass_through) - .def("next",next) - .add_property("features",features, - "The list of features.\n" - "\n" - "Usage:\n" - ">>> m.query_map_point(0, 10, 10)\n" - "\n" - ">>> fs = m.query_map_point(0, 10, 10)\n" - ">>> for f in fs.features:\n" - ">>> print f\n" - "\n" - ) + // Featureset implements Python iterator interface + py::class_> + (m, "Featureset") + .def("__iter__", [](mapnik::Featureset& itr) -> mapnik::Featureset& { return itr; }) + .def("__next__", next) ; } diff --git a/src/mapnik_font_engine.cpp b/src/mapnik_font_engine.cpp index dfc89f490..5fdcfedd8 100644 --- a/src/mapnik_font_engine.cpp +++ b/src/mapnik_font_engine.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,42 +20,22 @@ * *****************************************************************************/ +//mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - #include -#include +//pybind11 +#include +#include -void export_font_engine() +namespace py = pybind11; + +void export_font_engine(py::module const& m) { using mapnik::freetype_engine; - using mapnik::singleton; - using mapnik::CreateStatic; - using namespace boost::python; - class_,boost::noncopyable>("Singleton",no_init) - .def("instance",&singleton::instance, - return_value_policy()) - .staticmethod("instance") - ; - class_ >, - boost::noncopyable>("FontEngine",no_init) - .def("register_font",&freetype_engine::register_font) - .def("register_fonts",&freetype_engine::register_fonts) - .def("face_names",&freetype_engine::face_names) - 
.staticmethod("register_font") - .staticmethod("register_fonts") - .staticmethod("face_names") + py::class_(m, "FontEngine") + .def_static("register_font", &freetype_engine::register_font) + .def_static("register_fonts", &freetype_engine::register_fonts) + .def_static("face_names", &freetype_engine::face_names) ; } diff --git a/src/mapnik_fontset.cpp b/src/mapnik_fontset.cpp index 651efd11c..243f4faf0 100644 --- a/src/mapnik_fontset.cpp +++ b/src/mapnik_fontset.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,46 +20,34 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#pragma GCC diagnostic pop - //mapnik +#include #include +//pybind11 +#include +namespace py = pybind11; using mapnik::font_set; -void export_fontset () +void export_fontset (py::module const& m) { - using namespace boost::python; - class_("FontSet", init("default fontset constructor") - ) - .add_property("name", - make_function(&font_set::get_name,return_value_policy()), + py::class_(m, "FontSet") + .def(py::init(), "default fontset constructor") + .def_property("name", + &font_set::get_name, &font_set::set_name, "Get/Set the name of the FontSet.\n" ) - .def("add_face_name",&font_set::add_face_name, - (arg("name")), + .def("add_face_name", &font_set::add_face_name, "Add a face-name to the fontset.\n" "\n" "Example:\n" ">>> fs = Fontset('book-fonts')\n" - ">>> fs.add_face_name('DejaVu Sans Book')\n") - .add_property("names",make_function - (&font_set::get_face_names, - return_value_policy()), - "List of face names belonging to a FontSet.\n" - ) + ">>> fs.add_face_name('DejaVu Sans Book')\n", + py::arg("name")) + .def_property_readonly("names", + &font_set::get_face_names, + "List of face names belonging to a FontSet.\n") ; } diff --git a/src/mapnik_gamma_method.cpp b/src/mapnik_gamma_method.cpp index c1849ccd9..d4648af55 100644 --- a/src/mapnik_gamma_method.cpp +++ b/src/mapnik_gamma_method.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,31 +20,24 @@ * *****************************************************************************/ +// mapnik #include - -// boost -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#pragma GCC diagnostic pop - #include -#include "mapnik_enumeration.hpp" +//pybind11 +#include +#include -void export_gamma_method() -{ - using namespace boost::python; +namespace py = pybind11; - mapnik::enumeration_("gamma_method") - .value("POWER", mapnik::GAMMA_POWER) - .value("LINEAR",mapnik::GAMMA_LINEAR) - .value("NONE", mapnik::GAMMA_NONE) - .value("THRESHOLD", 
mapnik::GAMMA_THRESHOLD) - .value("MULTIPLY", mapnik::GAMMA_MULTIPLY) +void export_gamma_method(py::module const& m) +{ + py::native_enum(m, "gamma_method", "enum.Enum") + .value("POWER", mapnik::gamma_method_enum::GAMMA_POWER) + .value("LINEAR",mapnik::gamma_method_enum::GAMMA_LINEAR) + .value("NONE", mapnik::gamma_method_enum::GAMMA_NONE) + .value("THRESHOLD", mapnik::gamma_method_enum::GAMMA_THRESHOLD) + .value("MULTIPLY", mapnik::gamma_method_enum::GAMMA_MULTIPLY) + .finalize() ; } diff --git a/src/mapnik_geometry.cpp b/src/mapnik_geometry.cpp index d2ec6c152..64243636c 100644 --- a/src/mapnik_geometry.cpp +++ b/src/mapnik_geometry.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,47 +20,36 @@ * *****************************************************************************/ +// mapnik #include -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#include -#include -#include -#include -#pragma GCC diagnostic pop - // mapnik #include -#include -#include -#include -#include -#include -#include -#include - +#include +#include +#include +#include +#include +#include +#include #include // from_wkt #include // from_geojson #include // to_geojson #include // to_wkb #include // to_wkt -//#include #include - +#include "python_variant.hpp" // stl #include +//pybind11 +#include +#include +#include + +namespace py = pybind11; + namespace { std::shared_ptr > from_wkb_impl(std::string const& wkb) @@ -95,33 +84,16 @@ std::shared_ptr > from_geojson_impl(std::stri } -inline std::string boost_version() +template +py::object to_wkb_impl(GeometryType const& geom, mapnik::wkbByteOrder byte_order) { - std::ostringstream s; - s << BOOST_VERSION/100000 << "." << BOOST_VERSION/100 % 1000 << "." 
<< BOOST_VERSION % 100; - return s.str(); + mapnik::util::wkb_buffer_ptr wkb = mapnik::util::to_wkb(geom, byte_order); + if (wkb) return py::bytes(wkb->buffer(), wkb->size()); + return py::none(); } -PyObject* to_wkb_impl(mapnik::geometry::geometry const& geom, mapnik::wkbByteOrder byte_order) -{ - mapnik::util::wkb_buffer_ptr wkb = mapnik::util::to_wkb(geom,byte_order); - if (wkb) - { - return -#if PY_VERSION_HEX >= 0x03000000 - ::PyBytes_FromStringAndSize -#else - ::PyString_FromStringAndSize -#endif - ((const char*)wkb->buffer(),wkb->size()); - } - else - { - Py_RETURN_NONE; - } -} - -std::string to_geojson_impl(mapnik::geometry::geometry const& geom) +template +std::string to_geojson_impl(GeometryType const& geom) { std::string wkt; if (!mapnik::util::to_geojson(wkt, geom)) @@ -131,7 +103,8 @@ std::string to_geojson_impl(mapnik::geometry::geometry const& geom) return wkt; } -std::string to_wkt_impl(mapnik::geometry::geometry const& geom) +template +std::string to_wkt_impl(GeometryType const& geom) { std::string wkt; if (!mapnik::util::to_wkt(wkt,geom)) @@ -146,25 +119,26 @@ mapnik::geometry::geometry_types geometry_type_impl(mapnik::geometry::geometry geometry_envelope_impl(mapnik::geometry::geometry const& geom) +template +mapnik::box2d geometry_envelope_impl(GeometryType const& geom) { return mapnik::geometry::envelope(geom); } -// only Boost >= 1.56 contains the is_valid and is_simple functions -#if BOOST_VERSION >= 105600 -bool geometry_is_valid_impl(mapnik::geometry::geometry const& geom) +template +bool geometry_is_valid_impl(GeometryType const& geom) { return mapnik::geometry::is_valid(geom); } -bool geometry_is_simple_impl(mapnik::geometry::geometry const& geom) +template +bool geometry_is_simple_impl(GeometryType const& geom) { return mapnik::geometry::is_simple(geom); } -#endif -bool geometry_is_empty_impl(mapnik::geometry::geometry const& geom) +template +bool geometry_is_empty_impl(GeometryType const& geom) { return mapnik::geometry::is_empty(geom); } @@ -174,14 +148,16 @@ void geometry_correct_impl(mapnik::geometry::geometry & geom) mapnik::geometry::correct(geom); } -void polygon_set_exterior_impl(mapnik::geometry::polygon & poly, mapnik::geometry::linear_ring const& ring) +template +void add_coord(T & geom, double x, double y) { - poly.exterior_ring = ring; // copy + geom.emplace_back(x, y); } -void polygon_add_hole_impl(mapnik::geometry::polygon & poly, mapnik::geometry::linear_ring const& ring) +template +void add_impl(Dst & geom, Src const& src) { - poly.interior_rings.push_back(ring); // copy + geom.push_back(src); // copy } mapnik::geometry::point geometry_centroid_impl(mapnik::geometry::geometry const& geom) @@ -192,14 +168,20 @@ mapnik::geometry::point geometry_centroid_impl(mapnik::geometry::geometr } -void export_geometry() +void export_geometry(py::module const& m) { - using namespace boost::python; + using mapnik::geometry::geometry; + using mapnik::geometry::point; + using mapnik::geometry::line_string; + using mapnik::geometry::linear_ring; + using mapnik::geometry::polygon; + using mapnik::geometry::multi_point; + using mapnik::geometry::multi_line_string; + using mapnik::geometry::multi_polygon; + using mapnik::geometry::geometry_collection; + - implicitly_convertible, mapnik::geometry::geometry >(); - implicitly_convertible, mapnik::geometry::geometry >(); - implicitly_convertible, mapnik::geometry::geometry >(); - enum_("GeometryType") + py::native_enum(m, "GeometryType", "enum.Enum") .value("Unknown",mapnik::geometry::geometry_types::Unknown) 
.value("Point",mapnik::geometry::geometry_types::Point) .value("LineString",mapnik::geometry::geometry_types::LineString) @@ -208,84 +190,172 @@ void export_geometry() .value("MultiLineString",mapnik::geometry::geometry_types::MultiLineString) .value("MultiPolygon",mapnik::geometry::geometry_types::MultiPolygon) .value("GeometryCollection",mapnik::geometry::geometry_types::GeometryCollection) + .finalize() ; - enum_("wkbByteOrder") - .value("XDR",mapnik::wkbXDR) - .value("NDR",mapnik::wkbNDR) + py::native_enum(m, "wkbByteOrder", "enum.Enum") + .value("XDR", mapnik::wkbXDR) + .value("NDR", mapnik::wkbNDR) + .finalize() ; - using mapnik::geometry::geometry; - using mapnik::geometry::point; - using mapnik::geometry::line_string; - using mapnik::geometry::linear_ring; - using mapnik::geometry::polygon; - class_ >("Point", init((arg("x"), arg("y")), - "Constructs a new Point object\n")) - .add_property("x", &point::x, "X coordinate") - .add_property("y", &point::y, "Y coordinate") -#if BOOST_VERSION >= 105600 - .def("is_valid", &geometry_is_valid_impl) - .def("is_simple", &geometry_is_simple_impl) -#endif - .def("to_geojson",&to_geojson_impl) - .def("to_wkb",&to_wkb_impl) - .def("to_wkt",&to_wkt_impl) + py::class_ >(m, "Point") + .def(py::init(), + "Constructs a new Point object\n", + py::arg("x"), py::arg("y")) + .def_readwrite("x", &point::x, "X coordinate") + .def_readwrite("y", &point::y, "Y coordinate") + .def("is_valid", &geometry_is_valid_impl>) + .def("is_simple", &geometry_is_simple_impl>) + .def("to_geojson",&to_geojson_impl>) + .def("to_wkb",&to_wkb_impl>) + .def("to_wkt",&to_wkt_impl>) + .def("envelope",&geometry_envelope_impl>) + ; + + py::class_>(m, "MultiPoint") + .def(py::init<>(), + "Constructs a new MultiPoint object\n") + .def("add_point", &add_coord>, "Adds coord x,y") + .def("add_point", &add_impl, point>, "Adds mapnik.Point") + .def("is_valid", &geometry_is_valid_impl>) + .def("is_simple", &geometry_is_simple_impl>) + .def("to_geojson",&to_geojson_impl>) + .def("to_wkb",&to_wkb_impl>) + .def("to_wkt",&to_wkt_impl>) + .def("envelope",&geometry_envelope_impl>) + .def("num_points",[](multi_point const& mp) { return mp.size(); },"Number of points in MultiPoint") + .def("__len__", [](multi_pointconst &mp) { return mp.size(); }) + .def("__iter__", [](multi_point const& mp) { + return py::make_iterator(mp.begin(), mp.end()); + }, py::keep_alive<0, 1>()) ; - class_ >("LineString", init<>( - "Constructs a new LineString object\n")) - .def("add_coord", &line_string::add_coord, "Adds coord") -#if BOOST_VERSION >= 105600 - .def("is_valid", &geometry_is_valid_impl) - .def("is_simple", &geometry_is_simple_impl) -#endif - .def("to_geojson",&to_geojson_impl) - .def("to_wkb",&to_wkb_impl) - .def("to_wkt",&to_wkt_impl) + py::class_ >(m, "LineString") + .def(py::init<>(), "Constructs a new LineString object\n") + .def("add_point", &add_coord>, "Adds coord x,y") + .def("add_point", &add_impl, point>, "Adds mapnik.Point") + .def("is_valid", &geometry_is_valid_impl>) + .def("is_simple", &geometry_is_simple_impl>) + .def("to_geojson",&to_geojson_impl>) + .def("to_wkb",&to_wkb_impl>) + .def("to_wkt",&to_wkt_impl>) + .def("envelope",&geometry_envelope_impl>) + .def("num_points",[](line_string const& l) { return l.size(); },"Number of points in LineString") + .def("__len__", [](line_stringconst &l) { return l.size(); }) + .def("__iter__", [](line_string const& l) { + return py::make_iterator(l.begin(), l.end()); + }, py::keep_alive<0, 1>()) ; - class_ >("LinearRing", init<>( - "Constructs a new 
LinearRtring object\n")) - .def("add_coord", &linear_ring::add_coord, "Adds coord") + py::class_ >(m, "LinearRing") + .def(py::init<>(), "Constructs a new LinearRtring object\n") + .def("add_point", &add_coord>, "Adds coord x,y") + .def("add_point", &add_impl, point>, "Adds mapnik.Point") + .def("envelope",&geometry_envelope_impl>) + .def("__len__", [](linear_ringconst &r) { return r.size(); }) + .def("__iter__", [](linear_ring const& r) { + return py::make_iterator(r.begin(), r.end()); + }, py::keep_alive<0, 1>()) ; - class_ >("Polygon", init<>( - "Constructs a new Polygon object\n")) - .add_property("exterior_ring", &polygon::exterior_ring , "Exterior ring") - .def("add_hole", &polygon_add_hole_impl, "Add interior ring") - .def("num_rings", polygon_set_exterior_impl, "Number of rings (at least 1)") -#if BOOST_VERSION >= 105600 - .def("is_valid", &geometry_is_valid_impl) - .def("is_simple", &geometry_is_simple_impl) -#endif - .def("to_geojson",&to_geojson_impl) - .def("to_wkb",&to_wkb_impl) - .def("to_wkt",&to_wkt_impl) + py::class_ >(m, "Polygon") + .def(py::init<>(), "Constructs a new Polygon object\n") + .def("add_ring", &add_impl, linear_ring>, "Add ring") + .def("is_valid", &geometry_is_valid_impl>) + .def("is_simple", &geometry_is_simple_impl>) + .def("to_geojson",&to_geojson_impl>) + .def("to_wkb",&to_wkb_impl>) + .def("to_wkt",&to_wkt_impl>) + .def("envelope",&geometry_envelope_impl>) + .def("num_rings", [](polygonconst &p) { return p.size(); }, "Number of rings") + .def("__len__", [](polygonconst &p) { return p.size(); }) + .def("__iter__", [](polygon const& p) { + return py::make_iterator(p.begin(), p.end()); + }, py::keep_alive<0, 1>()) ; - class_, std::shared_ptr >, boost::noncopyable>("Geometry",no_init) - .def("envelope",&geometry_envelope_impl) - .def("from_geojson", from_geojson_impl) - .def("from_wkt", from_wkt_impl) - .def("from_wkb", from_wkb_impl) - .staticmethod("from_geojson") - .staticmethod("from_wkt") - .staticmethod("from_wkb") - .def("__str__",&to_wkt_impl) + py::class_ >(m, "MultiLineString") + .def(py::init<>(), "Constructs a new MultiLineString object\n") + .def("add_string", &add_impl, line_string>, "Add LineString") + .def("is_valid", &geometry_is_valid_impl>) + .def("is_simple", &geometry_is_simple_impl>) + .def("to_geojson",&to_geojson_impl>) + .def("to_wkb",&to_wkb_impl>) + .def("to_wkt",&to_wkt_impl>) + .def("envelope",&geometry_envelope_impl>) + .def("__len__", [](multi_line_stringconst& mls) { return mls.size(); }) + .def("__iter__", [](multi_line_string const& mls) { + return py::make_iterator(mls.begin(), mls.end()); + }, py::keep_alive<0, 1>()) + ; + + py::class_ >(m, "MultiPolygon") + .def(py::init<>(), "Constructs a new MultiPolygon object\n") + .def("add_polygon", &add_impl, polygon>, "Add Polygon") + .def("is_valid", &geometry_is_valid_impl>) + .def("is_simple", &geometry_is_simple_impl>) + .def("to_geojson",&to_geojson_impl>) + .def("to_wkb",&to_wkb_impl>) + .def("to_wkt",&to_wkt_impl>) + .def("envelope",&geometry_envelope_impl>) + .def("__len__", [](multi_polygonconst& mp) { return mp.size(); }) + .def("__iter__", [](multi_polygon const& mp) { + return py::make_iterator(mp.begin(), mp.end()); + }, py::keep_alive<0, 1>()) + ; + + py::class_ >(m, "GeometryCollection") + .def(py::init<>(), "Constructs a new GeometryCollection object\n") + .def("add_geometry", &add_impl, geometry>, "Add Geometry") + .def("is_valid", &geometry_is_valid_impl>) + .def("is_simple", &geometry_is_simple_impl>) + .def("to_geojson",&to_geojson_impl>) + 
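// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): the __iter__/__len__ pattern
// used by the geometry containers above. py::make_iterator wraps a begin/end
// pair, and py::keep_alive<0, 1>() ties the returned iterator (call slot 0) to
// the container it came from (slot 1, the implicit self), so the underlying
// C++ storage stays alive while Python is still iterating. The names demo_iter
// and demo_ring are invented for illustration only.
#include <pybind11/pybind11.h>
#include <vector>
namespace py = pybind11;

struct demo_ring
{
    std::vector<double> coords;
};

PYBIND11_MODULE(demo_iter, m)
{
    py::class_<demo_ring>(m, "DemoRing")
        .def(py::init<>())
        .def("add", [](demo_ring& r, double v) { r.coords.push_back(v); })
        .def("__len__", [](demo_ring const& r) { return r.coords.size(); })
        .def("__iter__", [](demo_ring const& r) {
                 return py::make_iterator(r.coords.begin(), r.coords.end());
             },
             py::keep_alive<0, 1>()); // keep DemoRing alive while iterating
}
// ---------------------------------------------------------------------------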
.def("to_wkb",&to_wkb_impl>) + .def("to_wkt",&to_wkt_impl>) + .def("envelope",&geometry_envelope_impl>) + .def("__len__", [](geometry_collectionconst& gc) { return gc.size(); }) + .def("__iter__", [](geometry_collection const& gc) { + return py::make_iterator(gc.begin(), gc.end()); + }, py::keep_alive<0, 1>()) + ; + + py::class_, std::shared_ptr>>(m, "Geometry") + .def(py::init>()) + .def(py::init>()) + .def(py::init>()) + .def(py::init>()) + .def(py::init>()) + .def(py::init>()) + .def(py::init>()) + .def("envelope",&geometry_envelope_impl>) + .def_static("from_geojson", from_geojson_impl) + .def_static("from_wkt", from_wkt_impl) + .def_static("from_wkb", from_wkb_impl) + .def("__str__",&to_wkt_impl>) .def("type",&geometry_type_impl) -#if BOOST_VERSION >= 105600 - .def("is_valid", &geometry_is_valid_impl) - .def("is_simple", &geometry_is_simple_impl) -#endif - .def("is_empty", &geometry_is_empty_impl) + .def("is_valid", &geometry_is_valid_impl>) + .def("is_simple", &geometry_is_simple_impl>) + .def("is_empty", &geometry_is_empty_impl>) .def("correct", &geometry_correct_impl) .def("centroid",&geometry_centroid_impl) - .def("to_wkb",&to_wkb_impl) - .def("to_wkt",&to_wkt_impl) - .def("to_geojson",&to_geojson_impl) - //.def("to_svg",&to_svg) - // TODO add other geometry_type methods + .def("to_wkb",&to_wkb_impl>) + .def("to_wkt",&to_wkt_impl>) + .def("to_json",&to_geojson_impl>) + .def("to_geojson",&to_geojson_impl>) + .def_property_readonly("__geo_interface__", [](geometry const& g) { + py::object json = py::module_::import("json"); + py::object loads = json.attr("loads"); + return loads(to_geojson_impl>(g));}) ; + + py::implicitly_convertible, mapnik::geometry::geometry>(); + py::implicitly_convertible, mapnik::geometry::geometry>(); + py::implicitly_convertible, mapnik::geometry::geometry>(); + py::implicitly_convertible, mapnik::geometry::geometry>(); + py::implicitly_convertible, mapnik::geometry::geometry>(); + py::implicitly_convertible, mapnik::geometry::geometry>(); + py::implicitly_convertible, mapnik::geometry::geometry>(); + } diff --git a/src/mapnik_grid.cpp b/src/mapnik_grid.cpp index 1147ac261..973159f1d 100644 --- a/src/mapnik_grid.cpp +++ b/src/mapnik_grid.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -21,30 +21,18 @@ *****************************************************************************/ #if defined(GRID_RENDERER) - -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include "python_grid_utils.hpp" -using namespace boost::python; +//pybind11 +#include + +namespace py = pybind11; // help compiler see template definitions -static dict (*encode)( mapnik::grid const&, std::string const& , bool, unsigned int) = mapnik::grid_encode; +static py::dict (*encode)( mapnik::grid const&, std::string const& , bool, unsigned int) = mapnik::grid_encode; bool painted(mapnik::grid const& grid) { @@ -58,32 +46,27 @@ mapnik::grid::value_type get_pixel(mapnik::grid const& grid, int x, int y) mapnik::grid::data_type 
const & data = grid.data(); return data(x,y); } - PyErr_SetString(PyExc_IndexError, "invalid x,y for grid dimensions"); - boost::python::throw_error_already_set(); - return 0; + throw py::index_error("invalid x,y for grid dimensions"); } -void export_grid() +void export_grid(py::module const& m) { - class_ >( - "Grid", - "This class represents a feature hitgrid.", - init( - ( boost::python::arg("width"), boost::python::arg("height"),boost::python::arg("key")="__id__"), - "Create a mapnik.Grid object\n" - )) + py::class_> + (m, "Grid", "This class represents a feature hitgrid.") + .def(py::init(), + "Create a mapnik.Grid object\n", + py::arg("width"), py::arg("height"), py::arg("key")="__id__") .def("painted",&painted) .def("width",&mapnik::grid::width) .def("height",&mapnik::grid::height) .def("view",&mapnik::grid::get_view) .def("get_pixel",&get_pixel) .def("clear",&mapnik::grid::clear) - .def("encode",encode, - ( boost::python::arg("encoding")="utf", boost::python::arg("features")=true,boost::python::arg("resolution")=4 ), - "Encode the grid as as optimized json\n" - ) - .add_property("key", - make_function(&mapnik::grid::get_key,return_value_policy()), + .def("encode", encode, + "Encode the grid as as optimized json\n", + py::arg("encoding") = "utf", py::arg("features") = true, py::arg("resolution") = 4) + .def_property("key", + &mapnik::grid::get_key, &mapnik::grid::set_key, "Get/Set key to be used as unique indentifier for features\n" "The value should either be __id__ to refer to the feature.id()\n" diff --git a/src/mapnik_grid_view.cpp b/src/mapnik_grid_view.cpp index 230ccc0d3..2b0bb0411 100644 --- a/src/mapnik_grid_view.cpp +++ b/src/mapnik_grid_view.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -21,44 +21,25 @@ *****************************************************************************/ #if defined(GRID_RENDERER) - -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include #include #include "python_grid_utils.hpp" -using namespace boost::python; - // help compiler see template definitions -static dict (*encode)( mapnik::grid_view const&, std::string const& , bool, unsigned int) = mapnik::grid_encode; +static py::dict (*encode)( mapnik::grid_view const&, std::string const& , bool, unsigned int) = mapnik::grid_encode; -void export_grid_view() +void export_grid_view(py::module const& m) { - class_ >("GridView", - "This class represents a feature hitgrid subset.",no_init) + py::class_> + (m, "GridView", "This class represents a feature hitgrid subset.") .def("width",&mapnik::grid_view::width) .def("height",&mapnik::grid_view::height) .def("encode",encode, - ( boost::python::arg("encoding")="utf",boost::python::arg("add_features")=true,boost::python::arg("resolution")=4 ), - "Encode the grid as as optimized json\n" - ) + "Encode the grid as as optimized json\n", + py::arg("encoding")="utf",py::arg("add_features")=true,py::arg("resolution")=4) ; } diff --git a/src/mapnik_enumeration_wrapper_converter.hpp 
b/src/mapnik_group_symbolizer.cpp similarity index 63% rename from src/mapnik_enumeration_wrapper_converter.hpp rename to src/mapnik_group_symbolizer.cpp index 45e5f7f37..be818a0b7 100644 --- a/src/mapnik_enumeration_wrapper_converter.hpp +++ b/src/mapnik_group_symbolizer.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,26 +20,26 @@ * *****************************************************************************/ -#ifndef MAPNIK_BINDINGS_PYTHON_ENUMERATION_WRAPPPER -#define MAPNIK_BINDINGS_PYTHON_ENUMERATION_WRAPPPER - // mapnik +#include #include - -// boost -#include - - -namespace boost { namespace python { - - struct mapnik_enumeration_wrapper_to_python - { - static PyObject* convert(mapnik::enumeration_wrapper const& v) - { - return ::PyLong_FromLongLong(v.value); // FIXME: this is a temp hack!! - } - }; - -}} - -#endif // MAPNIK_BINDINGS_PYTHON_ENUMERATION_WRAPPPER +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include + +namespace py = pybind11; + +void export_group_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::group_symbolizer; + + py::class_(m, "GroupSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + ; + +} diff --git a/src/mapnik_image.cpp b/src/mapnik_image.cpp index 89ae397a7..b66f29f07 100644 --- a/src/mapnik_image.cpp +++ b/src/mapnik_image.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,22 +20,8 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include #include @@ -43,14 +29,12 @@ #include #include #include - -// cairo -#if defined(HAVE_CAIRO) && defined(HAVE_PYCAIRO) -#include -#include -#include -#include -#endif +//stl +#include +//pybind11 +#include +#include +#include using mapnik::image_any; using mapnik::image_reader; @@ -58,46 +42,28 @@ using mapnik::get_image_reader; using mapnik::type_from_filename; using mapnik::save_to_file; -using namespace boost::python; +namespace py = pybind11; +namespace { // output 'raw' pixels -PyObject* tostring1( image_any const& im) +py::object to_string1(image_any const& im) { - return -#if PY_VERSION_HEX >= 0x03000000 - ::PyBytes_FromStringAndSize -#else - ::PyString_FromStringAndSize -#endif - ((const char*)im.bytes(),im.size()); + return py::bytes(reinterpret_cast(im.bytes()), im.size()); } // encode (png,jpeg) -PyObject* tostring2(image_any const & im, std::string const& format) +py::object to_string2(image_any const & im, std::string const& format) { std::string s = mapnik::save_to_string(im, format); - return -#if PY_VERSION_HEX >= 0x03000000 - ::PyBytes_FromStringAndSize -#else - ::PyString_FromStringAndSize -#endif - 
(s.data(),s.size()); + return py::bytes(s.data(), s.length()); } -PyObject* tostring3(image_any const & im, std::string const& format, mapnik::rgba_palette const& pal) +py::object to_string3(image_any const & im, std::string const& format, mapnik::rgba_palette const& pal) { std::string s = mapnik::save_to_string(im, format, pal); - return -#if PY_VERSION_HEX >= 0x03000000 - ::PyBytes_FromStringAndSize -#else - ::PyString_FromStringAndSize -#endif - (s.data(),s.size()); + return py::bytes(s.data(), s.length()); } - void save_to_file1(mapnik::image_any const& im, std::string const& filename) { save_to_file(im,filename); @@ -143,7 +109,7 @@ std::shared_ptr copy(mapnik::image_any const& im, mapnik::image_dtype return std::make_shared(mapnik::image_copy(im, type, offset, scaling)); } -unsigned compare(mapnik::image_any const& im1, mapnik::image_any const& im2, double threshold, bool alpha) +std::size_t compare(mapnik::image_any const& im1, mapnik::image_any const& im2, double threshold, bool alpha) { return mapnik::compare(im1, im2, threshold, alpha); } @@ -153,84 +119,62 @@ struct get_pixel_visitor get_pixel_visitor(unsigned x, unsigned y) : x_(x), y_(y) {} - object operator() (mapnik::image_null const&) + py::object operator() (mapnik::image_null const&) { throw std::runtime_error("Can not return a null image from a pixel (shouldn't have reached here)"); } template - object operator() (T const& im) + py::object operator() (T const& im) { using pixel_type = typename T::pixel_type; - return object(mapnik::get_pixel(im, x_, y_)); + using python_type = typename std::conditional::value, py::int_, py::float_>::type; + return python_type(mapnik::get_pixel(im, x_, y_)); } - private: unsigned x_; unsigned y_; }; -object get_pixel(mapnik::image_any const& im, unsigned x, unsigned y, bool get_color) +py::object get_pixel(mapnik::image_any const& im, int x, int y) { - if (x < static_cast(im.width()) && y < static_cast(im.height())) + if (x < 0 || x >= static_cast(im.width()) || + y < 0 || y >= static_cast(im.height())) { - if (get_color) - { - return object( - mapnik::get_pixel(im, x, y) - ); - } - else - { - return mapnik::util::apply_visitor(get_pixel_visitor(x, y), im); - } + throw std::out_of_range("invalid x,y for image dimensions"); } - PyErr_SetString(PyExc_IndexError, "invalid x,y for image dimensions"); - boost::python::throw_error_already_set(); - return object(); + return mapnik::util::apply_visitor(get_pixel_visitor(x, y), im); } -void set_pixel_color(mapnik::image_any & im, unsigned x, unsigned y, mapnik::color const& c) +mapnik::color get_pixel_color(mapnik::image_any const& im, int x, int y) { - if (x >= static_cast(im.width()) && y >= static_cast(im.height())) + if (x < 0 || x >= static_cast(im.width()) || + y < 0 || y >= static_cast(im.height())) { - PyErr_SetString(PyExc_IndexError, "invalid x,y for image dimensions"); - boost::python::throw_error_already_set(); - return; + throw std::out_of_range("invalid x,y for image dimensions"); } - mapnik::set_pixel(im, x, y, c); + return mapnik::get_pixel(im, x, y); } -void set_pixel_double(mapnik::image_any & im, unsigned x, unsigned y, double val) +template +void set_pixel(mapnik::image_any & im, int x, int y, T c) { - if (x >= static_cast(im.width()) && y >= static_cast(im.height())) + if (x < 0 || x >= static_cast(im.width()) || + y < 0 || y >= static_cast(im.height())) { - PyErr_SetString(PyExc_IndexError, "invalid x,y for image dimensions"); - boost::python::throw_error_already_set(); - return; + throw std::out_of_range("invalid x,y for 
image dimensions"); } - mapnik::set_pixel(im, x, y, val); -} - -void set_pixel_int(mapnik::image_any & im, unsigned x, unsigned y, int val) -{ - if (x >= static_cast(im.width()) && y >= static_cast(im.height())) - { - PyErr_SetString(PyExc_IndexError, "invalid x,y for image dimensions"); - boost::python::throw_error_already_set(); - return; - } - mapnik::set_pixel(im, x, y, val); + mapnik::set_pixel(im, x, y, c); } -unsigned get_type(mapnik::image_any & im) +mapnik::image_dtype get_type(mapnik::image_any & im) { return im.get_dtype(); } std::shared_ptr open_from_file(std::string const& filename) { - boost::optional type = type_from_filename(filename); + auto type = type_from_filename(filename); if (type) { std::unique_ptr reader(get_image_reader(filename,*type)); @@ -243,29 +187,60 @@ std::shared_ptr open_from_file(std::string const& filename) throw mapnik::image_reader_exception("Unsupported image format:" + filename); } -std::shared_ptr fromstring(std::string const& str) +std::shared_ptr open_from_file2(py::args const& args) +{ + auto filename = args[0].cast(); + std::uint32_t x0 = args[1].cast(); + std::uint32_t y0 = args[2].cast(); + std::uint32_t width = args[3].cast(); + std::uint32_t height = args[4].cast(); + auto type = type_from_filename(filename); + + if (type) + { + std::unique_ptr reader(get_image_reader(filename,*type)); + if (reader.get()) + { + return std::make_shared(reader->read(x0, y0, width, height)); + } + throw mapnik::image_reader_exception("Failed to load: " + filename); + } + throw mapnik::image_reader_exception("Unsupported image format:" + filename); +} + +std::shared_ptr from_string(std::string const& str) { std::unique_ptr reader(get_image_reader(str.c_str(),str.size())); if (reader.get()) { return std::make_shared(reader->read(0,0,reader->width(), reader->height())); } - throw mapnik::image_reader_exception("Failed to load image from buffer" ); + throw mapnik::image_reader_exception("Failed to load image from String" ); } -std::shared_ptr frombuffer(PyObject * obj) +std::shared_ptr from_buffer(py::bytes const& obj) { - void const* buffer=0; - Py_ssize_t buffer_len; - if (PyObject_AsReadBuffer(obj, &buffer, &buffer_len) == 0) + std::string_view view = std::string_view(obj); + std::unique_ptr reader + (get_image_reader(reinterpret_cast(view.data()), view.length())); + if (reader.get()) { - std::unique_ptr reader(get_image_reader(reinterpret_cast(buffer),buffer_len)); - if (reader.get()) - { - return std::make_shared(reader->read(0,0,reader->width(),reader->height())); - } + return std::make_shared(reader->read(0, 0, reader->width(), reader->height())); } - throw mapnik::image_reader_exception("Failed to load image from buffer" ); + throw mapnik::image_reader_exception("Failed to load image from Buffer" ); +} + +std::shared_ptr from_memoryview(py::memoryview const& memview) +{ + auto buf = py::buffer(memview); + py::buffer_info info = buf.request(); + std::unique_ptr reader + (get_image_reader(reinterpret_cast(info.ptr), info.size)); + if (reader.get()) + { + return std::make_shared(reader->read(0, 0, reader->width(), reader->height())); + } + throw mapnik::image_reader_exception("Failed to load image from Buffer" ); } void set_grayscale_to_alpha(image_any & im) @@ -323,60 +298,70 @@ void composite(image_any & dst, image_any & src, mapnik::composite_mode_e mode, } } -#if defined(HAVE_CAIRO) && defined(HAVE_PYCAIRO) -std::shared_ptr from_cairo(PycairoSurface* py_surface) -{ - mapnik::cairo_surface_ptr surface(cairo_surface_reference(py_surface->surface), 
mapnik::cairo_surface_closer()); - mapnik::image_rgba8 image = mapnik::image_rgba8(cairo_image_surface_get_width(&*surface), cairo_image_surface_get_height(&*surface)); - cairo_image_to_rgba8(image, surface); - return std::make_shared(std::move(image)); -} -#endif - -void export_image() -{ - using namespace boost::python; - // NOTE: must match list in include/mapnik/image_compositing.hpp - enum_("CompositeOp") - .value("clear", mapnik::clear) - .value("src", mapnik::src) - .value("dst", mapnik::dst) - .value("src_over", mapnik::src_over) - .value("dst_over", mapnik::dst_over) - .value("src_in", mapnik::src_in) - .value("dst_in", mapnik::dst_in) - .value("src_out", mapnik::src_out) - .value("dst_out", mapnik::dst_out) - .value("src_atop", mapnik::src_atop) - .value("dst_atop", mapnik::dst_atop) - .value("xor", mapnik::_xor) - .value("plus", mapnik::plus) - .value("minus", mapnik::minus) - .value("multiply", mapnik::multiply) - .value("screen", mapnik::screen) - .value("overlay", mapnik::overlay) - .value("darken", mapnik::darken) - .value("lighten", mapnik::lighten) - .value("color_dodge", mapnik::color_dodge) - .value("color_burn", mapnik::color_burn) - .value("hard_light", mapnik::hard_light) - .value("soft_light", mapnik::soft_light) - .value("difference", mapnik::difference) - .value("exclusion", mapnik::exclusion) - .value("contrast", mapnik::contrast) - .value("invert", mapnik::invert) - .value("grain_merge", mapnik::grain_merge) - .value("grain_extract", mapnik::grain_extract) - .value("hue", mapnik::hue) - .value("saturation", mapnik::saturation) - .value("color", mapnik::_color) - .value("value", mapnik::_value) - .value("linear_dodge", mapnik::linear_dodge) - .value("linear_burn", mapnik::linear_burn) - .value("divide", mapnik::divide) - ; +std::shared_ptr from_cairo(py::object const& surface) +{ + py::object ImageSurface = py::module_::import("cairo").attr("ImageSurface"); + py::object get_width = ImageSurface.attr("get_width"); + py::object get_height = ImageSurface.attr("get_height"); + py::object get_format = ImageSurface.attr("get_format"); + py::object get_data = ImageSurface.attr("get_data"); + int format = py::int_(get_format(surface)); + int width = py::int_(get_width(surface)); + int height = py::int_(get_height(surface)); + if (format == 0 ) // cairo.Format.ARGB32 + { + mapnik::image_rgba8 image{width, height}; + py::memoryview view = get_data(surface); + auto buf = py::buffer(view); + py::buffer_info info = buf.request(); + const std::unique_ptr out_row(new unsigned int[width]); + unsigned int const* in_row = reinterpret_cast(info.ptr); + for (int row = 0; row < height; row++, in_row += width) + { + for (int column = 0; column < width; column++) + { + unsigned int in = in_row[column]; + unsigned int a = (in >> 24) & 0xff; + unsigned int r = (in >> 16) & 0xff; + unsigned int g = (in >> 8) & 0xff; + unsigned int b = (in >> 0) & 0xff; + out_row[column] = mapnik::color(r, g, b, a).rgba(); + } + image.set_row(row, out_row.get(), width); + } + return std::make_shared(std::move(image)); + } + else if (format == 1 ) // cairo.Format.RGB24 + { + mapnik::image_rgba8 image{width, height}; + py::memoryview view = get_data(surface); + auto buf = py::buffer(view); + py::buffer_info info = buf.request(); + const std::unique_ptr out_row(new unsigned int[width]); + unsigned int const* in_row = reinterpret_cast(info.ptr); + for (int row = 0; row < height; row++, in_row += width) + { + for (int column = 0; column < width; column++) + { + unsigned int in = in_row[column]; + unsigned int r 
= (in >> 16) & 0xff; + unsigned int g = (in >> 8) & 0xff; + unsigned int b = (in >> 0) & 0xff; + out_row[column] = mapnik::color(r, g, b, 255).rgba(); + } + image.set_row(row, out_row.get(), width); + } + return std::make_shared(std::move(image)); + } + + throw std::runtime_error("Unable to convert this Cairo format to rgba8 image"); +} - enum_("ImageType") +} // namespace + +void export_image(py::module const& m) +{ + py::native_enum(m, "ImageType", "enum.Enum") .value("rgba8", mapnik::image_dtype_rgba8) .value("gray8", mapnik::image_dtype_gray8) .value("gray8s", mapnik::image_dtype_gray8s) @@ -388,13 +373,15 @@ void export_image() .value("gray64", mapnik::image_dtype_gray64) .value("gray64s", mapnik::image_dtype_gray64s) .value("gray64f", mapnik::image_dtype_gray64f) + .finalize() ; - class_, boost::noncopyable >("Image","This class represents a image.",init()) - .def(init()) - .def(init()) - .def(init()) - .def(init()) + py::class_>(m, "Image","This class represents a image.") + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) .def("width",&image_any::width) .def("height",&image_any::height) .def("view",&get_view) @@ -408,65 +395,53 @@ void export_image() .def("set_color_to_alpha",&set_color_to_alpha, "Set a given color to the alpha channel of the Image") .def("apply_opacity",&apply_opacity, "Set the opacity of the Image relative to the current alpha of each pixel.") .def("composite",&composite, - ( arg("self"), - arg("image"), - arg("mode")=mapnik::src_over, - arg("opacity")=1.0f, - arg("dx")=0, - arg("dy")=0 - )) + py::arg("image"), + py::arg("mode") = mapnik::src_over, + py::arg("opacity") = 1.0f, + py::arg("dx") = 0, + py::arg("dy") = 0 + ) .def("compare",&compare, - ( arg("self"), - arg("image"), - arg("threshold")=0.0, - arg("alpha")=true - )) + py::arg("image"), + py::arg("threshold")=0.0, + py::arg("alpha")=true + ) .def("copy",©, - ( arg("self"), - arg("type"), - arg("offset")=0.0, - arg("scaling")=1.0 - )) - .add_property("offset", + py::arg("type"), + py::arg("offset")=0.0, + py::arg("scaling")=1.0 + ) + .def_property("offset", &image_any::get_offset, &image_any::set_offset, "Gets or sets the offset component.\n") - .add_property("scaling", + .def_property("scaling", &image_any::get_scaling, &image_any::set_scaling, "Gets or sets the offset component.\n") .def("premultiplied",&premultiplied) .def("premultiply",&premultiply) .def("demultiply",&demultiply) - .def("set_pixel",&set_pixel_color) - .def("set_pixel",&set_pixel_double) - .def("set_pixel",&set_pixel_int) - .def("get_pixel",&get_pixel, - ( arg("self"), - arg("x"), - arg("y"), - arg("get_color")=false - )) + .def("set_pixel",&set_pixel) + .def("set_pixel",&set_pixel) + .def("set_pixel",&set_pixel) + .def("get_pixel_color",&get_pixel_color, + py::arg("x"), py::arg("y")) + .def("get_pixel", &get_pixel) .def("get_type",&get_type) .def("clear",&clear) - //TODO(haoyu) The method name 'tostring' might be confusing since they actually return bytes in Python 3 - - .def("tostring",&tostring1) - .def("tostring",&tostring2) - .def("tostring",&tostring3) + .def("to_string",&to_string1) + .def("to_string",&to_string2) + .def("to_string",&to_string3) .def("save", &save_to_file1) .def("save", &save_to_file2) .def("save", &save_to_file3) - .def("open",open_from_file) - .staticmethod("open") - .def("frombuffer",&frombuffer) - .staticmethod("frombuffer") - .def("fromstring",&fromstring) - .staticmethod("fromstring") -#if defined(HAVE_CAIRO) && defined(HAVE_PYCAIRO) - 
.def("from_cairo",&from_cairo) - .staticmethod("from_cairo") -#endif + .def_static("open",open_from_file) + .def_static("open",open_from_file2) + .def_static("from_buffer",&from_buffer) + .def_static("from_memoryview",&from_memoryview) + .def_static("from_string",&from_string) + .def_static("from_cairo",&from_cairo) ; } diff --git a/src/mapnik_image_view.cpp b/src/mapnik_image_view.cpp index 1086cda1c..2359efe09 100644 --- a/src/mapnik_image_view.cpp +++ b/src/mapnik_image_view.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,69 +20,44 @@ * *****************************************************************************/ +//mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#pragma GCC diagnostic pop - -// mapnik #include #include #include #include #include +#include +//stl #include +//pybind11 +#include +#include using mapnik::image_view_any; using mapnik::save_to_file; +namespace py = pybind11; + // output 'raw' pixels -PyObject* view_tostring1(image_view_any const& view) +py::object view_tostring1(image_view_any const& view) { std::ostringstream ss(std::ios::out|std::ios::binary); mapnik::view_to_stream(view, ss); - return -#if PY_VERSION_HEX >= 0x03000000 - ::PyBytes_FromStringAndSize -#else - ::PyString_FromStringAndSize -#endif - ((const char*)ss.str().c_str(),ss.str().size()); + return py::bytes(ss.str().c_str(), ss.str().size()); } // encode (png,jpeg) -PyObject* view_tostring2(image_view_any const & view, std::string const& format) +py::object view_tostring2(image_view_any const & view, std::string const& format) { std::string s = save_to_string(view, format); - return -#if PY_VERSION_HEX >= 0x03000000 - ::PyBytes_FromStringAndSize -#else - ::PyString_FromStringAndSize -#endif - (s.data(),s.size()); + return py::bytes(s.data(), s.length()); } -PyObject* view_tostring3(image_view_any const & view, std::string const& format, mapnik::rgba_palette const& pal) +py::object view_tostring3(image_view_any const & view, std::string const& format, mapnik::rgba_palette const& pal) { std::string s = save_to_string(view, format, pal); - return -#if PY_VERSION_HEX >= 0x03000000 - ::PyBytes_FromStringAndSize -#else - ::PyString_FromStringAndSize -#endif - (s.data(),s.size()); + return py::bytes(s.data(), s.length()); } bool is_solid(image_view_any const& view) @@ -112,16 +87,15 @@ void save_view3(image_view_any const& view, } -void export_image_view() +void export_image_view(py::module const& m) { - using namespace boost::python; - class_("ImageView","A view into an image.",no_init) + py::class_(m, "ImageView", "A view into an image.") .def("width",&image_view_any::width) .def("height",&image_view_any::height) .def("is_solid",&is_solid) - .def("tostring",&view_tostring1) - .def("tostring",&view_tostring2) - .def("tostring",&view_tostring3) + .def("to_string",&view_tostring1) + .def("to_string",&view_tostring2) + .def("to_string",&view_tostring3) .def("save",&save_view1) .def("save",&save_view2) .def("save",&save_view3) diff --git 
a/src/mapnik_label_collision_detector.cpp b/src/mapnik_label_collision_detector.cpp index 0d7ab8f4e..567db60b8 100644 --- a/src/mapnik_label_collision_detector.cpp +++ b/src/mapnik_label_collision_detector.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,25 +20,14 @@ * *****************************************************************************/ +//mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#pragma GCC diagnostic pop - #include #include +//pybind11 +#include -#include +namespace py = pybind11; using mapnik::label_collision_detector4; using mapnik::box2d; @@ -47,69 +36,64 @@ using mapnik::Map; namespace { -std::shared_ptr -create_label_collision_detector_from_extent(box2d const &extent) +std::shared_ptr create_label_collision_detector_from_extent(box2d const &extent) { return std::make_shared(extent); } -std::shared_ptr -create_label_collision_detector_from_map(Map const &m) +std::shared_ptr create_label_collision_detector_from_map (Map const &m) { double buffer = m.buffer_size(); box2d extent(-buffer, -buffer, m.width() + buffer, m.height() + buffer); return std::make_shared(extent); } -boost::python::list -make_label_boxes(std::shared_ptr det) -{ - boost::python::list boxes; +py::list make_label_boxes(std::shared_ptr det) +{ + py::list boxes; for (label_collision_detector4::query_iterator jtr = det->begin(); jtr != det->end(); ++jtr) { - boxes.append >(jtr->get().box); + boxes.append(jtr->get().box); } - return boxes; } } -void export_label_collision_detector() +void export_label_collision_detector(py::module const& m) { - using namespace boost::python; - // for overload resolution void (label_collision_detector4::*insert_box)(box2d const &) = &label_collision_detector4::insert; - class_, boost::noncopyable> - ("LabelCollisionDetector", - "Object to detect collisions between labels, used in the rendering process.", - no_init) - - .def("__init__", make_constructor(create_label_collision_detector_from_extent), - "Creates an empty collision detection object with a given extent. Note " - "that the constructor from Map objects is a sensible default and usually " - "what you want to do.\n" - "\n" - "Example:\n" - ">>> m = Map(size_x, size_y)\n" - ">>> buf_sz = m.buffer_size\n" - ">>> extent = mapnik.Box2d(-buf_sz, -buf_sz, m.width + buf_sz, m.height + buf_sz)\n" - ">>> detector = mapnik.LabelCollisionDetector(extent)") - - .def("__init__", make_constructor(create_label_collision_detector_from_map), - "Creates an empty collision detection object matching the given Map object. " - "The created detector will have the same size, including the buffer, as the " - "map object. 
This is usually what you want to do.\n" - "\n" - "Example:\n" - ">>> m = Map(size_x, size_y)\n" - ">>> detector = mapnik.LabelCollisionDetector(m)") - - .def("extent", &label_collision_detector4::extent, return_value_policy(), + py::class_> + (m, "LabelCollisionDetector", + "Object to detect collisions between labels, used in the rendering process.") + + .def(py::init([](box2d const& box) { + return create_label_collision_detector_from_extent(box);}), + "Creates an empty collision detection object with a given extent. Note " + "that the constructor from Map objects is a sensible default and usually " + "what you want to do.\n" + "\n" + "Example:\n" + ">>> m = Map(size_x, size_y)\n" + ">>> buf_sz = m.buffer_size\n" + ">>> extent = mapnik.Box2d(-buf_sz, -buf_sz, m.width + buf_sz, m.height + buf_sz)\n" + ">>> detector = mapnik.LabelCollisionDetector(extent)") + + .def(py::init([](mapnik::Map const& m){ + return create_label_collision_detector_from_map(m);}), + "Creates an empty collision detection object matching the given Map object. " + "The created detector will have the same size, including the buffer, as the " + "map object. This is usually what you want to do.\n" + "\n" + "Example:\n" + ">>> m = Map(size_x, size_y)\n" + ">>> detector = mapnik.LabelCollisionDetector(m)") + + .def("extent", &label_collision_detector4::extent, "Returns the total extent (bounding box) of all labels inside the detector.\n" "\n" "Example:\n" diff --git a/src/mapnik_layer.cpp b/src/mapnik_layer.cpp index 9836a2ba4..d8a2a782b 100644 --- a/src/mapnik_layer.cpp +++ b/src/mapnik_layer.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,149 +20,47 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include #include +//pybind11 +#include +#include +#include +#include + +namespace py = pybind11; using mapnik::layer; using mapnik::parameters; using mapnik::datasource_cache; +PYBIND11_MAKE_OPAQUE(std::vector); -struct layer_pickle_suite : boost::python::pickle_suite -{ - static boost::python::tuple - getinitargs(const layer& l) - { - return boost::python::make_tuple(l.name(),l.srs()); - } - - static boost::python::tuple - getstate(const layer& l) - { - boost::python::list s; - std::vector const& style_names = l.styles(); - for (unsigned i = 0; i < style_names.size(); ++i) - { - s.append(style_names[i]); - } - return boost::python::make_tuple(l.clear_label_cache(),l.minimum_scale_denominator(),l.maximum_scale_denominator(),l.queryable(),l.datasource()->params(),l.cache_features(),s); - } - - static void - setstate (layer& l, boost::python::tuple state) - { - using namespace boost::python; - if (len(state) != 9) - { - PyErr_SetObject(PyExc_ValueError, - ("expected 9-item tuple in call to __setstate__; got %s" - % state).ptr() - ); - throw_error_already_set(); - } - - l.set_clear_label_cache(extract(state[0])); +std::vector & (mapnik::layer::*set_styles_)() = 
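The two `LabelCollisionDetector` constructors above illustrate the pybind11 replacement for `boost::python::make_constructor`: a lambda handed to `py::init()` that returns the class's holder type. A minimal sketch of the same pattern with an invented type:

```C++
// 'detector' is a placeholder type, not mapnik's label_collision_detector4.
#include <pybind11/pybind11.h>
#include <memory>

namespace py = pybind11;

struct detector
{
    explicit detector(double extent) : extent(extent) {}
    double extent;
};

PYBIND11_MODULE(example, m)
{
    py::class_<detector, std::shared_ptr<detector>>(m, "Detector")
        // py::init(<factory lambda>) adopts the returned shared_ptr as the
        // instance holder, which is what make_constructor did in boost::python
        .def(py::init([](double extent) { return std::make_shared<detector>(extent); }),
             py::arg("extent"),
             "Create a detector covering the given extent.")
        .def_readonly("extent", &detector::extent);
}
```

Because the holder is a `std::shared_ptr`, several factory overloads can coexist on one class, just like the extent- and Map-based constructors above.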
&mapnik::layer::styles; +std::vector const& (mapnik::layer::*get_styles_)() const = &mapnik::layer::styles; - l.set_minimum_scale_denominator(extract(state[1])); - - l.set_maximum_scale_denominator(extract(state[2])); - - l.set_queryable(extract(state[3])); - - mapnik::parameters params = extract(state[4]); - l.set_datasource(datasource_cache::instance().create(params)); - - boost::python::list s = extract(state[5]); - for (int i=0;i(s[i])); - } - - l.set_cache_features(extract(state[6])); - } -}; - -std::vector & (mapnik::layer::*_styles_)() = &mapnik::layer::styles; - -void set_maximum_extent(mapnik::layer & l, boost::optional > const& box) +void export_layer(py::module const& m) { - if (box) - { - l.set_maximum_extent(*box); - } - else - { - l.reset_maximum_extent(); - } -} + py::bind_vector>(m, "StyleNames", py::module_local()); -void set_buffer_size(mapnik::layer & l, boost::optional const& buffer_size) -{ - if (buffer_size) - { - l.set_buffer_size(*buffer_size); - } - else - { - l.reset_buffer_size(); - } -} - -PyObject * get_buffer_size(mapnik::layer & l) -{ - boost::optional buffer_size = l.buffer_size(); - if (buffer_size) - { -#if PY_VERSION_HEX >= 0x03000000 - return PyLong_FromLong(*buffer_size); -#else - return PyInt_FromLong(*buffer_size); -#endif - } - else - { - Py_RETURN_NONE; - } -} - -void export_layer() -{ - using namespace boost::python; - class_ >("Names") - .def(vector_indexing_suite,true >()) - ; - - class_("Layer", "A Mapnik map layer.", init >( - "Create a Layer with a named string and, optionally, an srs string.\n" - "\n" - "The srs can be either a Proj.4 epsg code ('+init=epsg:') or\n" - "of a Proj.4 literal ('+proj=').\n" - "If no srs is specified it will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n" - "\n" - "Usage:\n" - ">>> from mapnik import Layer\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" - ">>> lyr\n" - "\n" - )) - - .def_pickle(layer_pickle_suite()) + py::class_(m, "Layer", "A Mapnik map layer.") + .def(py::init(), + "Create a Layer with a named string and, optionally, an srs string.\n" + "\n" + "The srs can be either a Proj epsg code ('epsg:') or\n" + "of a Proj literal ('+proj=').\n" + "If no srs is specified it will default to 'epsg:4326'\n" + "\n" + "Usage:\n" + ">>> from mapnik import Layer\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" + ">>> lyr\n" + "\n", + py::arg("name"), py::arg("srs") = mapnik::MAPNIK_GEOGRAPHIC_PROJ + ) .def("envelope",&layer::envelope, "Return the geographic envelope/bounding box." 
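The `StyleNames` binding just above relies on `PYBIND11_MAKE_OPAQUE` plus `py::bind_vector`, so Python sees the layer's `std::vector<std::string>` as a shared, mutable object rather than a converted list copy. A self-contained sketch of that pattern; the `holder` type and the value-taking setter are inventions for illustration:

```C++
// Illustration only: 'holder' stands in for the bound class.
#include <pybind11/pybind11.h>
#include <pybind11/stl_bind.h>
#include <string>
#include <vector>

namespace py = pybind11;

PYBIND11_MAKE_OPAQUE(std::vector<std::string>);  // disable automatic list conversion

struct holder
{
    std::vector<std::string> const& names() const { return names_; }
    std::vector<std::string> names_;
};

PYBIND11_MODULE(example, m)
{
    // gives the vector list-like methods (append, __getitem__, __len__, ...)
    py::bind_vector<std::vector<std::string>>(m, "StyleNames", py::module_local());

    py::class_<holder>(m, "Holder")
        .def(py::init<>())
        .def_property("names",
                      &holder::names,  // returned by reference, so h.names.append(...) mutates C++
                      [](holder& h, std::vector<std::string> const& v) { h.names_ = v; });
}
```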
@@ -171,7 +69,7 @@ void export_layer() "\n" "Usage:\n" ">>> from mapnik import Layer\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" ">>> lyr.envelope()\n" "box2d(-1.0,-1.0,0.0,0.0) # default until a datasource is loaded\n" ) @@ -188,7 +86,7 @@ void export_layer() "\n" "Usage:\n" ">>> from mapnik import Layer\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" ">>> lyr.visible(1.0/1000000)\n" "True\n" ">>> lyr.active = False\n" @@ -196,14 +94,14 @@ void export_layer() "False\n" ) - .add_property("active", + .def_property("active", &layer::active, &layer::set_active, "Get/Set whether this layer is active and will be rendered (same as status property).\n" "\n" "Usage:\n" ">>> from mapnik import Layer\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" ">>> lyr.active\n" "True # Active by default\n" ">>> lyr.active = False # set False to disable layer rendering\n" @@ -211,14 +109,14 @@ void export_layer() "False\n" ) - .add_property("status", + .def_property("status", &layer::active, &layer::set_active, "Get/Set whether this layer is active and will be rendered.\n" "\n" "Usage:\n" ">>> from mapnik import Layer\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" ">>> lyr.status\n" "True # Active by default\n" ">>> lyr.status = False # set False to disable layer rendering\n" @@ -226,7 +124,7 @@ void export_layer() "False\n" ) - .add_property("clear_label_cache", + .def_property("clear_label_cache", &layer::clear_label_cache, &layer::set_clear_label_cache, "Get/Set whether to clear the label collision detector cache for this layer during rendering\n" @@ -237,7 +135,7 @@ void export_layer() ">>> lyr.clear_label_cache = True # set to True to clear the label collision detector cache\n" ) - .add_property("cache_features", + .def_property("cache_features", &layer::cache_features, &layer::set_cache_features, "Get/Set whether features should be cached during rendering if used between multiple styles\n" @@ -248,22 +146,22 @@ void export_layer() ">>> lyr.cache_features = True # set to True to enable feature caching\n" ) - .add_property("datasource", + .def_property("datasource", &layer::datasource, &layer::set_datasource, "The datasource attached to this layer.\n" "\n" "Usage:\n" ">>> from mapnik import Layer, Datasource\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" ">>> lyr.datasource = Datasource(type='shape',file='world_borders')\n" ">>> lyr.datasource\n" "\n" ) - .add_property("buffer_size", - &get_buffer_size, - &set_buffer_size, + .def_property("buffer_size", + &layer::buffer_size, + &layer::set_buffer_size, "Get/Set the size of buffer around layer in pixels.\n" "\n" "Usage:\n" @@ -274,23 +172,23 @@ void export_layer() "2\n" ) - .add_property("maximum_extent",make_function - (&layer::maximum_extent,return_value_policy()), - &set_maximum_extent, + .def_property("maximum_extent", + &layer::maximum_extent, + &layer::set_maximum_extent, "The maximum extent of the map.\n" "\n" "Usage:\n" ">>> m.maximum_extent = Box2d(-180,-90,180,90)\n" ) - .add_property("maximum_scale_denominator", + .def_property("maximum_scale_denominator", &layer::maximum_scale_denominator, 
&layer::set_maximum_scale_denominator, "Get/Set the maximum scale denominator of the layer.\n" "\n" "Usage:\n" ">>> from mapnik import Layer\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" ">>> lyr.maximum_scale_denominator\n" "1.7976931348623157e+308 # default is the numerical maximum\n" ">>> lyr.maximum_scale_denominator = 1.0/1000000\n" @@ -298,14 +196,14 @@ void export_layer() "9.9999999999999995e-07\n" ) - .add_property("minimum_scale_denominator", + .def_property("minimum_scale_denominator", &layer::minimum_scale_denominator, &layer::set_minimum_scale_denominator, - "Get/Set the minimum scale demoninator of the layer.\n" + "Get/Set the minimum scale denominator of the layer.\n" "\n" "Usage:\n" ">>> from mapnik import Layer\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" ">>> lyr.minimum_scale_denominator # default is 0\n" "0.0\n" ">>> lyr.minimum_scale_denominator = 1.0/1000000\n" @@ -313,14 +211,14 @@ void export_layer() "9.9999999999999995e-07\n" ) - .add_property("name", - make_function(&layer::name, return_value_policy()), + .def_property("name", + &layer::name, &layer::set_name, "Get/Set the name of the layer.\n" "\n" "Usage:\n" ">>> from mapnik import Layer\n" - ">>> lyr = Layer('My Layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = Layer('My Layer','epsg:4326')\n" ">>> lyr.name\n" "'My Layer'\n" ">>> lyr.name = 'New Name'\n" @@ -328,14 +226,14 @@ void export_layer() "'New Name'\n" ) - .add_property("queryable", + .def_property("queryable", &layer::queryable, &layer::set_queryable, "Get/Set whether this layer is queryable.\n" "\n" "Usage:\n" ">>> from mapnik import layer\n" - ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = layer('My layer','epsg:4326')\n" ">>> lyr.queryable\n" "False # Not queryable by default\n" ">>> lyr.queryable = True\n" @@ -343,36 +241,37 @@ void export_layer() "True\n" ) - .add_property("srs", - make_function(&layer::srs,return_value_policy()), + .def_property("srs", + &layer::srs, &layer::set_srs, "Get/Set the SRS of the layer.\n" "\n" "Usage:\n" ">>> from mapnik import layer\n" - ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = layer('My layer','epsg:4326')\n" ">>> lyr.srs\n" - "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' # The default srs if not initialized with custom srs\n" - ">>> # set to google mercator with Proj.4 literal\n" + "'epsg:4326' # The default srs if not initialized with custom srs\n" + ">>> # set to google mercator with Proj literal\n" "... 
\n" - ">>> lyr.srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'\n" + ">>> lyr.srs = 'epsg:3857'\n" ) - .add_property("group_by", - make_function(&layer::group_by,return_value_policy()), + .def_property("group_by", + &layer::group_by, &layer::set_group_by, "Get/Set the optional layer group name.\n" "\n" "More details at https://github.com/mapnik/mapnik/wiki/Grouped-rendering:\n" ) - .add_property("styles", - make_function(_styles_,return_value_policy()), + .def_property("styles", + get_styles_, + set_styles_, "The styles list attached to this layer.\n" "\n" "Usage:\n" ">>> from mapnik import layer\n" - ">>> lyr = layer('My layer','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')\n" + ">>> lyr = layer('My layer','epsg:4326')\n" ">>> lyr.styles\n" "\n" ">>> len(lyr.styles)\n" @@ -384,6 +283,6 @@ void export_layer() "'My Style'\n" ) // comparison - .def(self == self) + .def(py::self == py::self) ; } diff --git a/src/mapnik_svg.hpp b/src/mapnik_line_pattern_symbolizer.cpp similarity index 53% rename from src/mapnik_svg.hpp rename to src/mapnik_line_pattern_symbolizer.cpp index 418ee0511..77268b282 100644 --- a/src/mapnik_svg.hpp +++ b/src/mapnik_line_pattern_symbolizer.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2010 Robert Coup + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -19,38 +19,32 @@ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * *****************************************************************************/ -#ifndef MAPNIK_PYTHON_BINDING_SVG_INCLUDED -#define MAPNIK_PYTHON_BINDING_SVG_INCLUDED // mapnik -#include +#include #include -#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include -namespace mapnik { -using namespace boost::python; +namespace py = pybind11; -template -std::string get_svg_transform(T& symbolizer) +void export_line_pattern_symbolizer(py::module const& m) { - return symbolizer.get_image_transform_string(); -} + using namespace python_mapnik; + using mapnik::line_pattern_symbolizer; -template -void set_svg_transform(T& symbolizer, std::string const& transform_wkt) -{ - transform_list_ptr trans_expr = mapnik::parse_transform(transform_wkt); - if (!trans_expr) - { - std::stringstream ss; - ss << "Could not parse transform from '" - << transform_wkt - << "', expected SVG transform attribute"; - throw mapnik::value_error(ss.str()); - } - symbolizer.set_image_transform(trans_expr); -} + py::class_(m, "LinePatternSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + .def_property("file", + &get_property, + &set_path_property, + "File path or mapnik.PathExpression") -} // end of namespace mapnik + ; -#endif // MAPNIK_PYTHON_BINDING_SVG_INCLUDED +} diff --git a/src/mapnik_line_symbolizer.cpp b/src/mapnik_line_symbolizer.cpp new file mode 100644 index 000000000..102698bc3 --- /dev/null +++ b/src/mapnik_line_symbolizer.cpp @@ -0,0 +1,146 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 
2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include + +#include "mapnik_symbolizer.hpp" +//pybind11 +#include +#include +#include +#include +#include + +namespace py = pybind11; + +namespace { + +std::string get_stroke_dasharray(mapnik::symbolizer_base & sym) +{ + auto dash = mapnik::get(sym, mapnik::keys::stroke_dasharray); + + std::ostringstream os; + for (std::size_t i = 0; i < dash.size(); ++i) + { + os << dash[i].first << "," << dash[i].second; + if (i + 1 < dash.size()) + os << ","; + } + return os.str(); +} + +void set_stroke_dasharray(mapnik::symbolizer_base & sym, std::string str) +{ + mapnik::dash_array dash; + if (mapnik::util::parse_dasharray(str, dash)) + { + mapnik::put(sym, mapnik::keys::stroke_dasharray, dash); + } + else + { + throw std::runtime_error("Can't parse dasharray"); + } +} + +} + +void export_line_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::line_symbolizer; + + py::native_enum(m, "line_rasterizer", "enum.Enum") + .value("FULL",mapnik::line_rasterizer_enum::RASTERIZER_FULL) + .value("FAST",mapnik::line_rasterizer_enum::RASTERIZER_FAST) + .finalize() + ; + + py::native_enum(m, "stroke_linecap", "enum.Enum") + .value("BUTT_CAP",mapnik::line_cap_enum::BUTT_CAP) + .value("SQUARE_CAP",mapnik::line_cap_enum::SQUARE_CAP) + .value("ROUND_CAP",mapnik::line_cap_enum::ROUND_CAP) + .finalize() + ; + + py::native_enum(m, "stroke_linejoin", "enum.Enum") + .value("MITER_JOIN",mapnik::line_join_enum::MITER_JOIN) + .value("MITER_REVERT_JOIN",mapnik::line_join_enum::MITER_REVERT_JOIN) + .value("ROUND_JOIN",mapnik::line_join_enum::ROUND_JOIN) + .value("BEVEL_JOIN",mapnik::line_join_enum::BEVEL_JOIN) + .finalize() + ; + + py::class_(m, "LineSymbolizer") + .def(py::init<>(), "Default LineSymbolizer - 1px solid black") + .def("__hash__",hash_impl_2) + .def_property("stroke", + &get_property, + &set_color_property, + "Stroke color") + .def_property("stroke_width", + &get_property, + &set_double_property, + "Stroke width") + .def_property("stroke_opacity", + &get_property, + &set_double_property, + "Stroke opacity") + .def_property("stroke_gamma", + &get_property, + &set_double_property, + "Stroke gamma") + .def_property("stroke_gamma_method", + &get, + &set_enum_property, + "Stroke gamma method") + .def_property("line_rasterizer", + &get, + &set_enum_property, + "Line rasterizer") + .def_property("stroke_linecap", + &get, + &set_enum_property, + "Stroke linecap") + .def_property("stroke_linejoin", + &get, + &set_enum_property, + "Stroke linejoin") + .def_property("stroke_dasharray", + &get_stroke_dasharray, + &set_stroke_dasharray, + "Stroke dasharray") + .def_property("stroke_dashoffset", + &get_property, + &set_double_property, + "Stroke dashoffset") + .def_property("stroke_miterlimit", + &get_property, + &set_double_property, + "Stroke miterlimit") + + ; +} diff --git a/src/mapnik_logger.cpp b/src/mapnik_logger.cpp 
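The `stroke_dasharray` property above round-trips between mapnik's dash array and the flat `"d1,g1,d2,g2,..."` string accepted by `parse_dasharray`. A standalone version of the formatting half, assuming the dash array is a vector of (dash, gap) pairs of doubles:

```C++
// Standalone sketch; 'dash_array' here is an assumption standing in for mapnik::dash_array.
#include <sstream>
#include <string>
#include <utility>
#include <vector>

using dash_array = std::vector<std::pair<double, double>>;

std::string format_dasharray(dash_array const& dash)
{
    std::ostringstream os;
    for (std::size_t i = 0; i < dash.size(); ++i)
    {
        os << dash[i].first << "," << dash[i].second;  // dash length, gap length
        if (i + 1 < dash.size())
            os << ",";                                 // keep the list flat: d1,g1,d2,g2,...
    }
    return os.str();
}
```

For example, `{{5, 2}, {1, 2}}` formats as `"5,2,1,2"`, which the setter above can parse back into a dash array.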
index 8fc7c324a..c7683c7a0 100644 --- a/src/mapnik_logger.cpp +++ b/src/mapnik_logger.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,65 +20,43 @@ * *****************************************************************************/ +//mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - #include #include -#include "mapnik_enumeration.hpp" -void export_logger() +//pybind11 +#include +#include +#include + +namespace py = pybind11; + +void export_logger(py::module const& m) { using mapnik::logger; using mapnik::singleton; using mapnik::CreateStatic; - using namespace boost::python; - class_,boost::noncopyable>("Singleton",no_init) - .def("instance",&singleton::instance, - return_value_policy()) - .staticmethod("instance") - ; - enum_("severity_type") + py::native_enum(m, "severity_type", "enum.IntEnum") .value("Debug", logger::debug) .value("Warn", logger::warn) .value("Error", logger::error) .value("None", logger::none) + .finalize() ; - class_ >, - boost::noncopyable>("logger",no_init) - .def("get_severity", &logger::get_severity) - .def("set_severity", &logger::set_severity) - .def("get_object_severity", &logger::get_object_severity) - .def("set_object_severity", &logger::set_object_severity) - .def("clear_object_severity", &logger::clear_object_severity) - .def("get_format", &logger::get_format) - .def("set_format", &logger::set_format) - .def("str", &logger::str) - .def("use_file", &logger::use_file) - .def("use_console", &logger::use_console) - .staticmethod("get_severity") - .staticmethod("set_severity") - .staticmethod("get_object_severity") - .staticmethod("set_object_severity") - .staticmethod("clear_object_severity") - .staticmethod("get_format") - .staticmethod("set_format") - .staticmethod("str") - .staticmethod("use_file") - .staticmethod("use_console") + py::class_>(m, "logger") + .def_static("get_severity", &logger::get_severity) + .def_static("set_severity", &logger::set_severity) + .def_static("get_object_severity", &logger::get_object_severity) + .def_static("set_object_severity", &logger::set_object_severity) + .def_static("clear_object_severity", &logger::clear_object_severity) + .def_static("get_format", &logger::get_format) + .def_static("set_format", &logger::set_format) + .def_static("str", &logger::str) + .def_static("use_file", &logger::use_file) + .def_static("use_console", &logger::use_console) ; } diff --git a/src/mapnik_map.cpp b/src/mapnik_map.cpp index 3f3719fa7..b2b164339 100644 --- a/src/mapnik_map.cpp +++ b/src/mapnik_map.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,31 +20,25 @@ * *****************************************************************************/ +//mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC 
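The logger's `severity_type` above uses `py::native_enum`, which produces a real `enum.IntEnum` on the Python side and must be sealed with `.finalize()`. A minimal sketch with a hypothetical enum; note that `py::native_enum` requires a recent pybind11 and, to the best of my knowledge, its own header, so adjust the include for your version:

```C++
// Hypothetical 'severity' enum, not mapnik::logger's; shows the native_enum pattern only.
#include <pybind11/pybind11.h>
#include <pybind11/native_enum.h>  // assumption: separate header in recent pybind11 releases

namespace py = pybind11;

enum class severity : int { debug = 0, warn = 1, error = 2, off = 3 };

PYBIND11_MODULE(example, m)
{
    py::native_enum<severity>(m, "severity_type", "enum.IntEnum")  // backed by enum.IntEnum
        .value("Debug", severity::debug)
        .value("Warn", severity::warn)
        .value("Error", severity::error)
        .value("Off", severity::off)  // 'Off' rather than 'None' here, since None is a Python keyword
        .finalize();                  // required before the enum can be used from Python
}
```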
diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#include -#include -#pragma GCC diagnostic pop - -// mapnik #include #include #include +#include #include #include #include -#include "mapnik_enumeration.hpp" +#include "mapnik_value_converter.hpp" +#include "python_optional.hpp" +//pybind11 +#include +#include +#include +#include +#include + +namespace py = pybind11; using mapnik::color; using mapnik::coord; @@ -52,8 +46,14 @@ using mapnik::box2d; using mapnik::layer; using mapnik::Map; -std::vector& (Map::*layers_nonconst)() = &Map::layers; -std::vector const& (Map::*layers_const)() const = &Map::layers; +PYBIND11_MAKE_OPAQUE(std::vector); +PYBIND11_MAKE_OPAQUE(std::map); +PYBIND11_MAKE_OPAQUE(mapnik::parameters); + +namespace { +std::vector& (Map::*set_layers)() = &Map::layers; +std::vector const& (Map::*get_layers)() const = &Map::layers; +mapnik::parameters const& (Map::*params_const)() const = &Map::get_extra_parameters; mapnik::parameters& (Map::*params_nonconst)() = &Map::get_extra_parameters; void insert_style(mapnik::Map & m, std::string const& name, mapnik::feature_type_style const& style) @@ -71,8 +71,7 @@ mapnik::feature_type_style find_style(mapnik::Map const& m, std::string const& n boost::optional style = m.find_style(name); if (!style) { - PyErr_SetString(PyExc_KeyError, "Invalid style name"); - boost::python::throw_error_already_set(); + throw std::runtime_error("Invalid style name"); } return *style; } @@ -82,8 +81,7 @@ mapnik::font_set find_fontset(mapnik::Map const& m, std::string const& name) boost::optional fontset = m.find_fontset(name); if (!fontset) { - PyErr_SetString(PyExc_KeyError, "Invalid font_set name"); - boost::python::throw_error_already_set(); + throw std::runtime_error("Invalid font_set name"); } return *fontset; } @@ -93,8 +91,7 @@ mapnik::font_set find_fontset(mapnik::Map const& m, std::string const& name) mapnik::featureset_ptr query_point(mapnik::Map const& m, int index, double x, double y) { if (index < 0){ - PyErr_SetString(PyExc_IndexError, "Please provide a layer index >= 0"); - boost::python::throw_error_already_set(); + throw pybind11::index_error("Please provide a layer index >= 0"); } unsigned idx = index; return m.query_point(idx, x, y); @@ -103,8 +100,7 @@ mapnik::featureset_ptr query_point(mapnik::Map const& m, int index, double x, do mapnik::featureset_ptr query_map_point(mapnik::Map const& m, int index, double x, double y) { if (index < 0){ - PyErr_SetString(PyExc_IndexError, "Please provide a layer index >= 0"); - boost::python::throw_error_already_set(); + throw pybind11::index_error("Please provide a layer index >= 0"); } unsigned idx = index; return m.query_map_point(idx, x, y); @@ -122,31 +118,15 @@ void set_maximum_extent(mapnik::Map & m, boost::optional > } } -struct extract_style -{ - using result_type = boost::python::tuple; - result_type operator() (std::map::value_type const& val) const - { - return boost::python::make_tuple(val.first,val.second); - } -}; - -using style_extract_iterator = boost::transform_iterator; -using style_range = std::pair; -style_range _styles_ (mapnik::Map const& m) -{ - return style_range( - boost::make_transform_iterator(m.begin_styles(), extract_style()), - boost::make_transform_iterator(m.end_styles(), extract_style())); -} +} //namespace -void export_map() +void export_map(py::module 
const& m) { - using namespace boost::python; - + py::bind_vector>(m, "Layers", py::module_local()); + py::bind_map>(m, "Styles", py::module_local()); // aspect ratio fix modes - mapnik::enumeration_("aspect_fix_mode") + py::native_enum(m, "aspect_fix_mode", "enum.Enum") .value("GROW_BBOX", mapnik::Map::GROW_BBOX) .value("GROW_CANVAS",mapnik::Map::GROW_CANVAS) .value("SHRINK_BBOX",mapnik::Map::SHRINK_BBOX) @@ -156,35 +136,29 @@ void export_map() .value("ADJUST_CANVAS_WIDTH",mapnik::Map::ADJUST_CANVAS_WIDTH) .value("ADJUST_CANVAS_HEIGHT", mapnik::Map::ADJUST_CANVAS_HEIGHT) .value("RESPECT", mapnik::Map::RESPECT) + .finalize() ; - class_ >("Layers") - .def(vector_indexing_suite >()) - ; - - class_("StyleRange") - .def("__iter__", - boost::python::range(&style_range::first, &style_range::second)) - ; + py::class_(m, "Map","The map object.") + .def(py::init(), + "Create a Map with a width and height as integers and, optionally,\n" + "an srs string either with a Proj epsg code ('epsg:')\n" + "or with a Proj literal ('+proj=').\n" + "If no srs is specified the map will default to 'epsg:4326'\n" + "\n" + "Usage:\n" + ">>> from mapnik import Map\n" + ">>> m = Map(600,400)\n" + ">>> m\n" + "\n" + ">>> m.srs\n" + "'epsg:4326'\n", + py::arg("width"), + py::arg("height"), + py::arg("srs") = mapnik::MAPNIK_GEOGRAPHIC_PROJ + ) - class_("Map","The map object.",init >( - ( arg("width"),arg("height"),arg("srs") ), - "Create a Map with a width and height as integers and, optionally,\n" - "an srs string either with a Proj.4 epsg code ('+init=epsg:')\n" - "or with a Proj.4 literal ('+proj=').\n" - "If no srs is specified the map will default to '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n" - "\n" - "Usage:\n" - ">>> from mapnik import Map\n" - ">>> m = Map(600,400)\n" - ">>> m\n" - "\n" - ">>> m.srs\n" - "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n" - )) - - .def("append_style",insert_style, - (arg("style_name"),arg("style_object")), + .def("append_style", insert_style, "Insert a Mapnik Style onto the map by appending it.\n" "\n" "Usage:\n" @@ -193,12 +167,13 @@ void export_map() ">>> m.append_style('Style Name', sty)\n" "True # style object added to map by name\n" ">>> m.append_style('Style Name', sty)\n" - "False # you can only append styles with unique names\n" + "False # you can only append styles with unique names\n", + py::arg("style_name"), py::arg("style_object") ) - .def("append_fontset",insert_fontset, - (arg("fontset")), - "Add a FontSet to the map." + .def("append_fontset", insert_fontset, + "Add a FontSet to the map.", + py::arg("name"), py::arg("fontset") ) .def("buffered_envelope", @@ -218,8 +193,7 @@ void export_map() ) .def("envelope", - make_function(&Map::get_current_extent, - return_value_policy()), + &Map::get_current_extent, "Return the Map Box2d object\n" "and print the string representation\n" "of the current extent of the map.\n" @@ -234,26 +208,33 @@ void export_map() ) .def("find_fontset",find_fontset, - (arg("name")), - "Find a fontset by name." 
+ "Find a fontset by name.", + py::arg("name") ) .def("find_style", find_style, - (arg("name")), "Query the Map for a style by name and return\n" "a style object if found or raise KeyError\n" "style if not found.\n" "\n" "Usage:\n" ">>> m.find_style('Style Name')\n" - "\n" - ) - - .add_property("styles", _styles_) + "\n", + py::arg("name") + ) + .def_property("styles", + (std::map const& (mapnik::Map::*)() const) + &mapnik::Map::styles, + (std::map& (mapnik::Map::*)()) + &mapnik::Map::styles, + "Returns list of Styles" + "associated with this Map object") + // .def("styles", [] (mapnik::Map const& m) { + // return py::make_iterator(m.begin_styles(), m.end_styles()); + // }, py::keep_alive<0, 1>()) .def("pan",&Map::pan, - (arg("x"),arg("y")), "Set the Map center at a given x,y location\n" "as integers in the coordinates of the pixmap or map surface.\n" "\n" @@ -263,11 +244,11 @@ void export_map() "Coord(-0.5,-0.5) # default Map center\n" ">>> m.pan(-1,-1)\n" ">>> m.envelope().center()\n" - "Coord(0.00166666666667,-0.835)\n" + "Coord(0.00166666666667,-0.835)\n", + py::arg("x"), py::arg("y") ) .def("pan_and_zoom",&Map::pan_and_zoom, - (arg("x"),arg("y"),arg("factor")), "Set the Map center at a given x,y location\n" "and zoom factor as a float.\n" "\n" @@ -279,11 +260,11 @@ void export_map() "-0.0016666666666666668\n" ">>> m.pan_and_zoom(-1,-1,0.25)\n" ">>> m.scale()\n" - "0.00062500000000000001\n" + "0.00062500000000000001\n", + py::arg("x"), py::arg("y"), py::arg("factor") ) - .def("query_map_point",query_map_point, - (arg("layer_idx"),arg("pixel_x"),arg("pixel_y")), + .def("query_map_point", query_map_point, "Query a Map Layer (by layer index) for features \n" "intersecting the given x,y location in the pixel\n" "coordinates of the rendered map image.\n" @@ -296,11 +277,11 @@ void export_map() ">>> featureset\n" "\n" ">>> featureset.features\n" - ">>> []\n" + ">>> []\n", + py::arg("layer_idx"), py::arg("pixel_x"), py::arg("pixel_y") ) - .def("query_point",query_point, - (arg("layer idx"),arg("x"),arg("y")), + .def("query_point", query_point, "Query a Map Layer (by layer index) for features \n" "intersecting the given x,y location in the coordinates\n" "of map projection.\n" @@ -313,30 +294,31 @@ void export_map() ">>> featureset\n" "\n" ">>> featureset.features\n" - ">>> []\n" + ">>> []\n", + py::arg("layer_idx"), py::arg("x"), py::arg("y") ) - .def("remove_all",&Map::remove_all, + .def("remove_all", &Map::remove_all, "Remove all Mapnik Styles and layers from the Map.\n" "\n" "Usage:\n" ">>> m.remove_all()\n" ) - .def("remove_style",&Map::remove_style, - (arg("style_name")), + .def("remove_style", &Map::remove_style, "Remove a Mapnik Style from the map.\n" "\n" "Usage:\n" - ">>> m.remove_style('Style Name')\n" + ">>> m.remove_style('Style Name')\n", + py::arg("style_name") ) - .def("resize",&Map::resize, - (arg("width"),arg("height")), + .def("resize", &Map::resize, "Resize a Mapnik Map.\n" "\n" "Usage:\n" - ">>> m.resize(64,64)\n" + ">>> m.resize(64,64)\n", + py::arg("width"), py::arg("height") ) .def("scale", &Map::scale, @@ -353,7 +335,7 @@ void export_map() ">>> m.scale_denominator()\n" ) - .def("view_transform",&Map::transform, + .def("view_transform", &Map::transform, "Return the map ViewTransform object\n" "which is used internally to convert between\n" "geographic coordinates and screen coordinates.\n" @@ -362,15 +344,15 @@ void export_map() ">>> m.view_transform()\n" ) - .def("zoom",&Map::zoom, - (arg("factor")), + .def("zoom", &Map::zoom, "Zoom in or out by a given factor.\n" 
"positive number larger than 1, zooms out\n" "positive number smaller than 1, zooms in\n" "\n" "Usage:\n" "\n" - ">>> m.zoom(0.25)\n" + ">>> m.zoom(0.25)\n", + py::arg("factor") ) .def("zoom_all",&Map::zoom_all, @@ -382,18 +364,20 @@ void export_map() ) .def("zoom_to_box",&Map::zoom_to_box, - (arg("Boxd2")), "Set the geographical extent of the map\n" "by specifying a Mapnik Box2d.\n" "\n" "Usage:\n" - ">>> extext = Box2d(-180.0, -90.0, 180.0, 90.0)\n" - ">>> m.zoom_to_box(extent)\n" + ">>> extent = Box2d(-180.0, -90.0, 180.0, 90.0)\n" + ">>> m.zoom_to_box(extent)\n", + py::arg("bounding_box") ) + .def_property("parameters", + params_const, + params_nonconst, + "extra parameters") - .add_property("parameters",make_function(params_nonconst,return_value_policy()),"TODO") - - .add_property("aspect_fix_mode", + .def_property("aspect_fix_mode", &Map::get_aspect_fix_mode, &Map::set_aspect_fix_mode, // TODO - how to add arg info to properties? @@ -404,8 +388,8 @@ void export_map() ">>> m.aspect_fix_mode = aspect_fix_mode.GROW_BBOX\n" ) - .add_property("background",make_function - (&Map::background,return_value_policy()), + .def_property("background", + &Map::background, &Map::set_background, "The background color of the map (same as background_color property).\n" "\n" @@ -413,8 +397,8 @@ void export_map() ">>> m.background = Color('steelblue')\n" ) - .add_property("background_color",make_function - (&Map::background,return_value_policy()), + .def_property("background_color", + &Map::background, &Map::set_background, "The background color of the map.\n" "\n" @@ -422,8 +406,8 @@ void export_map() ">>> m.background_color = Color('steelblue')\n" ) - .add_property("background_image",make_function - (&Map::background_image,return_value_policy()), + .def_property("background_image", + &Map::background_image, &Map::set_background_image, "The optional background image of the map.\n" "\n" @@ -431,7 +415,8 @@ void export_map() ">>> m.background_image = '/path/to/image.png'\n" ) - .add_property("background_image_comp_op",&Map::background_image_comp_op, + .def_property("background_image_comp_op", + &Map::background_image_comp_op, &Map::set_background_image_comp_op, "The background image compositing operation.\n" "\n" @@ -439,7 +424,8 @@ void export_map() ">>> m.background_image_comp_op = mapnik.CompositeOp.src_over\n" ) - .add_property("background_image_opacity",&Map::background_image_opacity, + .def_property("background_image_opacity", + &Map::background_image_opacity, &Map::set_background_image_opacity, "The background image opacity.\n" "\n" @@ -447,8 +433,8 @@ void export_map() ">>> m.background_image_opacity = 1.0\n" ) - .add_property("base", - make_function(&Map::base_path,return_value_policy()), + .def_property("base", + &Map::base_path, &Map::set_base_path, "The base path of the map where any files using relative \n" "paths will be interpreted as relative to.\n" @@ -457,7 +443,7 @@ void export_map() ">>> m.base_path = '.'\n" ) - .add_property("buffer_size", + .def_property("buffer_size", &Map::buffer_size, &Map::set_buffer_size, "Get/Set the size of buffer around map in pixels.\n" @@ -470,7 +456,7 @@ void export_map() "2\n" ) - .add_property("height", + .def_property("height", &Map::height, &Map::set_height, "Get/Set the height of the map in pixels.\n" @@ -484,8 +470,9 @@ void export_map() "600\n" ) - .add_property("layers",make_function - (layers_nonconst,return_value_policy()), + .def_property("layers", + get_layers, + set_layers, "The list of map layers.\n" "\n" "Usage:\n" @@ -495,8 +482,8 @@ void 
export_map() "\n" ) - .add_property("maximum_extent",make_function - (&Map::maximum_extent,return_value_policy()), + .def_property("maximum_extent", + &Map::maximum_extent, &set_maximum_extent, "The maximum extent of the map.\n" "\n" @@ -504,28 +491,28 @@ void export_map() ">>> m.maximum_extent = Box2d(-180,-90,180,90)\n" ) - .add_property("srs", - make_function(&Map::srs,return_value_policy()), + .def_property("srs", + &Map::srs, &Map::set_srs, - "Spatial reference in Proj.4 format.\n" + "Spatial reference in Proj format.\n" "Either an epsg code or proj literal.\n" "For example, a proj literal:\n" - "\t'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n" + "\t'epsg:4326'\n" "and a proj epsg code:\n" - "\t'+init=epsg:4326'\n" + "\t'epsg:4326'\n" "\n" "Note: using epsg codes requires the installation of\n" - "the Proj.4 'epsg' data file normally found in '/usr/local/share/proj'\n" + "the Proj 'epsg' data file normally found in '/usr/local/share/proj'\n" "\n" "Usage:\n" ">>> m.srs\n" - "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' # The default srs if not initialized with custom srs\n" + "'epsg:4326' # The default srs if not initialized with custom srs\n" ">>> # set to google mercator with Proj.4 literal\n" "... \n" - ">>> m.srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'\n" + ">>> m.srs = 'epsg:3857'\n" ) - .add_property("width", + .def_property("width", &Map::width, &Map::set_width, "Get/Set the width of the map in pixels.\n" @@ -539,6 +526,6 @@ void export_map() "800\n" ) // comparison - .def(self == self) + .def(py::self == py::self) ; } diff --git a/src/mapnik_markers_symbolizer.cpp b/src/mapnik_markers_symbolizer.cpp new file mode 100644 index 000000000..d28d5363a --- /dev/null +++ b/src/mapnik_markers_symbolizer.cpp @@ -0,0 +1,62 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
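In the Map bindings above, `styles` and `layers` disambiguate their const and non-const accessor overloads with explicit member-function-pointer casts before handing them to `def_property`. A compact sketch of that disambiguation on an invented type; here the setter is written as a lambda to keep the example unambiguous:

```C++
// 'registry' is an invented type used only to demonstrate overload disambiguation.
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>  // converts std::map to/from dict for this sketch
#include <map>
#include <string>
#include <utility>

namespace py = pybind11;

struct registry
{
    std::map<std::string, int>& entries() { return entries_; }
    std::map<std::string, int> const& entries() const { return entries_; }
    std::map<std::string, int> entries_;
};

PYBIND11_MODULE(example, m)
{
    py::class_<registry>(m, "Registry")
        .def(py::init<>())
        .def_property("entries",
            // the cast selects the const overload for the getter...
            static_cast<std::map<std::string, int> const& (registry::*)() const>(&registry::entries),
            // ...and a lambda serves as the setter
            [](registry& r, std::map<std::string, int> entries) { r.entries_ = std::move(entries); });
}
```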
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include + +namespace py = pybind11; + +void export_markers_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::markers_symbolizer; + + py::class_(m, "MarkersSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + .def_property("file", + &get_property, + &set_path_property, + "File path or mapnik.PathExpression") + .def_property("width", + &get_property, + &set_double_property, + "width or mapnik.Expression") + .def_property("height", + &get_property, + &set_double_property, + "height or mapnik.Expression") + .def_property("allow_overlap", + &get_property, + &set_boolean_property, + "Allow overlapping - True/False") + + ; + +} diff --git a/src/mapnik_palette.cpp b/src/mapnik_palette.cpp index 33bc23ae0..a1ca642f2 100644 --- a/src/mapnik_palette.cpp +++ b/src/mapnik_palette.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,25 +20,13 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - //mapnik +#include #include +//pybind11 +#include -// stl -#include +namespace py = pybind11; static std::shared_ptr make_palette( std::string const& palette, std::string const& format ) { @@ -48,22 +36,18 @@ static std::shared_ptr make_palette( std::string const& pa else if (format == "act") type = mapnik::rgba_palette::PALETTE_ACT; else - throw std::runtime_error("invalid type passed for mapnik.Palette: must be either rgba, rgb, or act"); + throw std::runtime_error("invalid type passed for `mapnik.Palette`: must be either rgba, rgb, or act"); return std::make_shared(palette, type); } -void export_palette () +void export_palette (py::module const& m) { - using namespace boost::python; + py::class_>(m, "Palette") + .def(py::init([](std::string const& palette, std::string const& format) { + return make_palette(palette, format); }), + "Creates a new color palette from a file\n", + py::arg("palette"), py::arg("type")) - class_, - boost::noncopyable >("Palette",no_init) - //, init( - // ( arg("palette"), arg("type")), - // "Creates a new color palette from a file\n" - // ) - .def( "__init__", boost::python::make_constructor(make_palette)) .def("to_string", &mapnik::rgba_palette::to_string, "Returns the palette as a string.\n" ) diff --git a/src/mapnik_parameters.cpp b/src/mapnik_parameters.cpp index fb58f3d78..a5aca2633 100644 --- a/src/mapnik_parameters.cpp +++ b/src/mapnik_parameters.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko 
* * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,228 +20,25 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include #include -#include #include -// stl -#include +#include +#include "mapnik_value_converter.hpp" +//pybind11 +#include +#include +#include +namespace py = pybind11; using mapnik::parameter; using mapnik::parameters; -struct parameter_pickle_suite : boost::python::pickle_suite -{ - static boost::python::tuple - getinitargs(const parameter& p) - { - using namespace boost::python; - return boost::python::make_tuple(p.first,p.second); - } -}; - -struct parameters_pickle_suite : boost::python::pickle_suite -{ - static boost::python::tuple - getstate(const parameters& p) - { - using namespace boost::python; - dict d; - parameters::const_iterator pos=p.begin(); - while(pos!=p.end()) - { - d[pos->first] = pos->second; - ++pos; - } - return boost::python::make_tuple(d); - } - - static void setstate(parameters& p, boost::python::tuple state) - { - using namespace boost::python; - if (len(state) != 1) - { - PyErr_SetObject(PyExc_ValueError, - ("expected 1-item tuple in call to __setstate__; got %s" - % state).ptr() - ); - throw_error_already_set(); - } - - dict d = extract(state[0]); - boost::python::list keys = d.keys(); - for (int i=0; i(keys[i]); - object obj = d[key]; - extract ex0(obj); - extract ex1(obj); - extract ex2(obj); - extract ex3(obj); - - // TODO - this is never hit - we need proper python string -> std::string to get invoked here - if (ex0.check()) - { - p[key] = ex0(); - } - else if (ex1.check()) - { - p[key] = ex1(); - } - else if (ex2.check()) - { - p[key] = ex2(); - } - else if (ex3.check()) - { - std::string buffer; - mapnik::to_utf8(ex3(),buffer); - p[key] = buffer; - } - else - { - MAPNIK_LOG_DEBUG(bindings) << "parameters_pickle_suite: Could not unpickle key=" << key; - } - } - } -}; - - -mapnik::value_holder get_params_by_key1(mapnik::parameters const& p, std::string const& key) -{ - parameters::const_iterator pos = p.find(key); - if (pos != p.end()) - { - // will be auto-converted to proper python type by `mapnik_params_to_python` - return pos->second; - } - return mapnik::value_null(); -} - -mapnik::value_holder get_params_by_key2(mapnik::parameters const& p, std::string const& key) -{ - parameters::const_iterator pos = p.find(key); - if (pos == p.end()) - { - PyErr_SetString(PyExc_KeyError, key.c_str()); - boost::python::throw_error_already_set(); - } - // will be auto-converted to proper python type by `mapnik_params_to_python` - return pos->second; -} - -mapnik::parameter get_params_by_index(mapnik::parameters const& p, int index) -{ - if (index < 0 || static_cast(index) > p.size()) - { - PyErr_SetString(PyExc_IndexError, "Index is out of range"); - throw boost::python::error_already_set(); - } - - parameters::const_iterator itr = p.begin(); - std::advance(itr, index); - if (itr != p.end()) - { - return *itr; - } - PyErr_SetString(PyExc_IndexError, "Index is out of range"); - throw boost::python::error_already_set(); -} - -unsigned get_params_size(mapnik::parameters 
const& p) -{ - return p.size(); -} - -void add_parameter(mapnik::parameters & p, mapnik::parameter const& param) -{ - p[param.first] = param.second; -} - -mapnik::value_holder get_param(mapnik::parameter const& p, int index) -{ - if (index == 0) - { - return p.first; - } - else if (index == 1) - { - return p.second; - } - else - { - PyErr_SetString(PyExc_IndexError, "Index is out of range"); - throw boost::python::error_already_set(); - } -} - -std::shared_ptr create_parameter(mapnik::value_unicode_string const& key, mapnik::value_holder const& value) -{ - std::string key_utf8; - mapnik::to_utf8(key, key_utf8); - return std::make_shared(key_utf8,value); -} - -bool contains(mapnik::parameters const& p, std::string const& key) -{ - parameters::const_iterator pos = p.find(key); - return pos != p.end(); -} - -// needed for Python_Unicode to std::string (utf8) conversion -std::shared_ptr create_parameter_from_string(mapnik::value_unicode_string const& key, mapnik::value_unicode_string const& ustr) +void export_parameters(py::module const& m) { - std::string key_utf8; - std::string ustr_utf8; - mapnik::to_utf8(key, key_utf8); - mapnik::to_utf8(ustr,ustr_utf8); - return std::make_shared(key_utf8, ustr_utf8); -} - -void export_parameters() -{ - using namespace boost::python; - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - - class_ >("Parameter",no_init) - .def("__init__", make_constructor(create_parameter), - "Create a mapnik.Parameter from a pair of values, the first being a string\n" - "and the second being either a string, and integer, or a float") - .def("__init__", make_constructor(create_parameter_from_string), - "Create a mapnik.Parameter from a pair of values, the first being a string\n" - "and the second being either a string, and integer, or a float") - - .def_pickle(parameter_pickle_suite()) - .def("__getitem__",get_param) - ; - - class_("Parameters",init<>()) - .def_pickle(parameters_pickle_suite()) - .def("get",get_params_by_key1) - .def("__getitem__",get_params_by_key2) - .def("__getitem__",get_params_by_index) - .def("__len__",get_params_size) - .def("__contains__",contains) - .def("append",add_parameter) - .def("iteritems",iterator()) - ; + py::bind_map(m, "Parameters", py::module_local()); } diff --git a/src/mapnik_placement_finder.cpp b/src/mapnik_placement_finder.cpp new file mode 100644 index 000000000..52b68e108 --- /dev/null +++ b/src/mapnik_placement_finder.cpp @@ -0,0 +1,189 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
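`export_parameters` above collapses to a single `py::bind_map` call: the hand-written `__getitem__`/`__len__`/`append` shims and the pickle suites are replaced by the container binding pybind11 generates. A minimal sketch with an ordinary `std::map` standing in for `mapnik::parameters`:

```C++
// std::map<std::string, std::string> stands in for mapnik::parameters in this sketch.
#include <pybind11/pybind11.h>
#include <pybind11/stl_bind.h>
#include <map>
#include <string>

namespace py = pybind11;

using params_t = std::map<std::string, std::string>;
PYBIND11_MAKE_OPAQUE(params_t);  // expose one shared C++ object instead of converting to dict

PYBIND11_MODULE(example, m)
{
    // provides dict-like access from Python: item get/set, len(), iteration, items(), ...
    py::bind_map<params_t>(m, "Parameters", py::module_local());
}
```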
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include + +namespace py = pybind11; + +namespace +{ + +template +void set_face_name(PlacementFinder & finder, std::string const& face_name) +{ + finder.defaults.format_defaults.face_name = face_name; +} + +template +std::string get_face_name(PlacementFinder const& finder) +{ + return finder.defaults.format_defaults.face_name; +} + +template +void set_text_size(PlacementFinder & finder, double text_size) +{ + finder.defaults.format_defaults.text_size = text_size; +} + +template +py::object get_text_size(PlacementFinder const& finder) +{ + return mapnik::util::apply_visitor(python_mapnik::extract_python_object<>(mapnik::keys::MAX_SYMBOLIZER_KEY), + finder.defaults.format_defaults.text_size); +} + +template +void set_fill(PlacementFinder & finder, mapnik::color const& fill) +{ + finder.defaults.format_defaults.fill = fill; +} + +template +py::object get_fill(PlacementFinder const& finder) +{ + return mapnik::util::apply_visitor(python_mapnik::extract_python_object<>(mapnik::keys::MAX_SYMBOLIZER_KEY), + finder.defaults.format_defaults.fill); +} + +template +void set_halo_fill(PlacementFinder & finder, mapnik::color const& halo_fill ) +{ + finder.defaults.format_defaults.halo_fill = halo_fill; +} + +template +py::object get_halo_fill(PlacementFinder const& finder) +{ + return mapnik::util::apply_visitor(python_mapnik::extract_python_object<>(mapnik::keys::MAX_SYMBOLIZER_KEY), + finder.defaults.format_defaults.halo_fill); +} + +template +void set_halo_radius(PlacementFinder & finder, double halo_radius) +{ + finder.defaults.format_defaults.halo_radius = halo_radius; +} + +template +py::object get_halo_radius(PlacementFinder const& finder) +{ + return mapnik::util::apply_visitor(python_mapnik::extract_python_object<>(mapnik::keys::MAX_SYMBOLIZER_KEY), + finder.defaults.format_defaults.halo_radius); +} + +template +void set_format_expr(PlacementFinder & finder, std::string const& expr) +{ + finder.defaults.set_format_tree( + std::make_shared(mapnik::parse_expression(expr))); +} + +template +std::string get_format_expr(PlacementFinder const& finder) +{ + mapnik::expression_set exprs; + finder.defaults.add_expressions(exprs); + std::string str = ""; + for (auto expr : exprs) + { + if (expr) + str += mapnik::to_expression_string(*expr); + } + return str; +} + +} + +void export_placement_finder(py::module const& m) +{ + py::class_>(m, "PlacementFinder") + .def(py::init<>(), "Default ctor") + .def_property("face_name", + &get_face_name, + &set_face_name, + "Font face name") + .def_property("text_size", + &get_text_size, + &set_text_size, + "Size of text") + .def_property("fill", + &get_fill, + &set_fill, + "Fill") + .def_property("halo_fill", + &get_halo_fill, + &set_halo_fill, + "Halo fill") + .def_property("halo_radius", + &get_halo_radius, + &set_halo_radius, + "Halo radius") + .def_property("format_expression", + &get_format_expr, + &set_format_expr, + "Format expression") + ; + +/* + py::class_>(m, "PlacementFinderSimple") + .def(py::init<>(), "Default ctor") + .def_property("face_name", + &get_face_name, + &set_face_name, + "Font face name") + 
.def_property("text_size", + &get_text_size, + &set_text_size, + "Size of text") + .def_property("fill", + &get_fill, + &set_fill, + "Fill") + .def_property("halo_fill", + &get_halo_fill, + &set_halo_fill, + "Halo fill") + .def_property("halo_radius", + &get_halo_radius, + &set_halo_radius, + "Halo radius") + .def_property("format_expression", + &get_format_expr, + &set_format_expr, + "Format expression") + ; +*/ +} diff --git a/src/mapnik_point_symbolizer.cpp b/src/mapnik_point_symbolizer.cpp new file mode 100644 index 000000000..cc0a14f42 --- /dev/null +++ b/src/mapnik_point_symbolizer.cpp @@ -0,0 +1,78 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include + +#include "mapnik_symbolizer.hpp" +//pybind11 +#include +#include + +namespace py = pybind11; + +void export_point_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::point_symbolizer; + + py::native_enum(m, "point_placement", "enum.Enum") + .value("CENTROID",mapnik::point_placement_enum::CENTROID_POINT_PLACEMENT) + .value("INTERIOR",mapnik::point_placement_enum::INTERIOR_POINT_PLACEMENT) + .finalize() + ; + + py::class_(m, "PointSymbolizer") + .def(py::init<>(), "Default Point Symbolizer - 4x4 black square") + .def("__hash__",hash_impl_2) + + .def_property("file", + &get_property, + &set_path_property, + "File path or mapnik.PathExpression") + + .def_property("opacity", + &get_property, + &set_double_property, + "Opacity - [0..1]") + + .def_property("allow_overlap", + &get_property, + &set_boolean_property, + "Allow overlapping - True/False") + + .def_property("ignore_placement", + &get_property, + &set_boolean_property, + "Ignore placement - True/False") + + .def_property("placement", + &get_property, + &set_enum_property, + "Point placement type CENTROID/INTERIOR") + + ; +} diff --git a/src/mapnik_polygon_pattern_symbolizer.cpp b/src/mapnik_polygon_pattern_symbolizer.cpp new file mode 100644 index 000000000..5f365fc70 --- /dev/null +++ b/src/mapnik_polygon_pattern_symbolizer.cpp @@ -0,0 +1,60 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
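A hedged usage sketch for the `PointSymbolizer` binding above; the icon path is hypothetical, and `point_placement` is the enum registered just before the class:

```python
import mapnik

sym = mapnik.PointSymbolizer()                    # default: 4x4 black square
sym.file = "icon.png"                             # file path or mapnik.PathExpression; example path
sym.opacity = 0.8
sym.allow_overlap = True
sym.ignore_placement = False
sym.placement = mapnik.point_placement.INTERIOR   # CENTROID or INTERIOR
```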
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include +#include + +namespace py = pybind11; + +void export_polygon_pattern_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::polygon_pattern_symbolizer; + + py::native_enum(m, "pattern_alignment", "enum.Enum") + .value("LOCAL", mapnik::pattern_alignment_enum::LOCAL_ALIGNMENT) + .value("GLOBAL", mapnik::pattern_alignment_enum::GLOBAL_ALIGNMENT) + .finalize() + ; + + py::class_(m, "PolygonPatternSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + .def_property("file", + &get_property, + &set_path_property, + "File path or mapnik.PathExpression") + .def_property("alignment", + &get_property, + &set_enum_property, + "Pattern alignment LOCAL/GLOBAL") + ; + +} diff --git a/src/mapnik_polygon_symbolizer.cpp b/src/mapnik_polygon_symbolizer.cpp new file mode 100644 index 000000000..856c23640 --- /dev/null +++ b/src/mapnik_polygon_symbolizer.cpp @@ -0,0 +1,68 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
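Likewise for `PolygonPatternSymbolizer` above, a minimal sketch (the pattern path is an example value):

```python
import mapnik

sym = mapnik.PolygonPatternSymbolizer()
sym.file = "pattern.png"                          # file path or mapnik.PathExpression
sym.alignment = mapnik.pattern_alignment.GLOBAL   # LOCAL or GLOBAL
```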
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include +#include +#include +#include + +namespace py = pybind11; + +void export_polygon_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::polygon_symbolizer; + + py::class_(m, "PolygonSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + + .def_property("fill", + &get_property, + &set_color_property, + "Fill - mapnik.Color, CSS color string or a valid mapnik.Expression") + + .def_property("fill_opacity", + &get_property, + &set_double_property, + "Fill opacity - [0-1] or a valid mapnik.Expression") + + .def_property("gamma", + &get_property, + &set_double_property, + "Fill gamma") + + .def_property("gamma_method", + &get_property, + &set_enum_property, + "Fill gamma method") + ; + +} diff --git a/src/mapnik_proj_transform.cpp b/src/mapnik_proj_transform.cpp index c4b009127..6abb21dcf 100644 --- a/src/mapnik_proj_transform.cpp +++ b/src/mapnik_proj_transform.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,44 +20,22 @@ * *****************************************************************************/ -#include - - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include #include -#include - +#include // stl #include +//pybind11 +#include +namespace py = pybind11; using mapnik::proj_transform; using mapnik::projection; -struct proj_transform_pickle_suite : boost::python::pickle_suite -{ - static boost::python::tuple - getinitargs(const proj_transform& p) - { - using namespace boost::python; - return boost::python::make_tuple(p.source(),p.dest()); - } -}; - namespace { mapnik::coord2d forward_transform_c(mapnik::proj_transform& t, mapnik::coord2d const& c) @@ -68,7 +46,7 @@ mapnik::coord2d forward_transform_c(mapnik::proj_transform& t, mapnik::coord2d c if (!t.forward(x,y,z)) { std::ostringstream s; s << "Failed to forward project " - << "from " << t.source().params() << " to: " << t.dest().params(); + << t.definition(); throw std::runtime_error(s.str()); } return mapnik::coord2d(x,y); @@ -82,7 +60,7 @@ mapnik::coord2d backward_transform_c(mapnik::proj_transform& t, mapnik::coord2d if (!t.backward(x,y,z)) { std::ostringstream s; s << "Failed to back project " - << "from " << t.dest().params() << " to: " << t.source().params(); + << t.definition(); throw std::runtime_error(s.str()); } return mapnik::coord2d(x,y); @@ -94,7 +72,7 @@ mapnik::box2d forward_transform_env(mapnik::proj_transform& t, mapnik::b if (!t.forward(new_box)) { std::ostringstream s; s << "Failed to forward project " - << "from " << t.source().params() << " to: " << t.dest().params(); + << 
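And for the `PolygonSymbolizer` binding above, a small sketch (color and opacity values are arbitrary):

```python
import mapnik

sym = mapnik.PolygonSymbolizer()
sym.fill = mapnik.Color("#f2eff9")   # mapnik.Color, CSS string or expression per the docstring
sym.fill_opacity = 0.5
sym.gamma = 0.65
```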
t.definition(); throw std::runtime_error(s.str()); } return new_box; @@ -106,7 +84,7 @@ mapnik::box2d backward_transform_env(mapnik::proj_transform& t, mapnik:: if (!t.backward(new_box)){ std::ostringstream s; s << "Failed to back project " - << "from " << t.dest().params() << " to: " << t.source().params(); + << t.definition(); throw std::runtime_error(s.str()); } return new_box; @@ -118,7 +96,7 @@ mapnik::box2d forward_transform_env_p(mapnik::proj_transform& t, mapnik: if (!t.forward(new_box,points)) { std::ostringstream s; s << "Failed to forward project " - << "from " << t.source().params() << " to: " << t.dest().params(); + << t.definition(); throw std::runtime_error(s.str()); } return new_box; @@ -130,7 +108,7 @@ mapnik::box2d backward_transform_env_p(mapnik::proj_transform& t, mapnik if (!t.backward(new_box,points)){ std::ostringstream s; s << "Failed to back project " - << "from " << t.dest().params() << " to: " << t.source().params(); + << t.definition(); throw std::runtime_error(s.str()); } return new_box; @@ -138,18 +116,18 @@ mapnik::box2d backward_transform_env_p(mapnik::proj_transform& t, mapnik } -void export_proj_transform () +void export_proj_transform (py::module const& m) { - using namespace boost::python; - - class_("ProjTransform", init< projection const&, projection const& >()) - .def_pickle(proj_transform_pickle_suite()) + py::class_(m, "ProjTransform") + .def(py::init(), + "Constructs ProjTransform object") .def("forward", forward_transform_c) .def("backward",backward_transform_c) .def("forward", forward_transform_env) .def("backward",backward_transform_env) .def("forward", forward_transform_env_p) .def("backward",backward_transform_env_p) + .def("definition",&proj_transform::definition) ; } diff --git a/src/mapnik_projection.cpp b/src/mapnik_projection.cpp index 15b62a661..c03f1c7b3 100644 --- a/src/mapnik_projection.cpp +++ b/src/mapnik_projection.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,37 +20,21 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#pragma GCC diagnostic pop - // mapnik +#include #include -#include +#include #include +//pybind11 +#include +#include using mapnik::projection; -struct projection_pickle_suite : boost::python::pickle_suite -{ - static boost::python::tuple - getinitargs(const projection& p) - { - using namespace boost::python; - return boost::python::make_tuple(p.params()); - } -}; +namespace py = pybind11; namespace { + mapnik::coord2d forward_pt(mapnik::coord2d const& pt, mapnik::projection const& prj) { @@ -95,32 +79,42 @@ mapnik::box2d inverse_env(mapnik::box2d const & box, } -void export_projection () +void export_projection (py::module& m) { - using namespace boost::python; - - class_("Projection", "Represents a map projection.",init( - (arg("proj4_string")), - "Constructs a new projection from its PROJ.4 string representation.\n" - "\n" - "The constructor will throw a RuntimeError in case the projection\n" - "cannot be 
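A sketch of the reworked `ProjTransform` above. The `epsg:` strings and the `Coord` name from the coord binding are assumptions not shown in this hunk; error messages now report `t.definition()` instead of the removed source/destination `params()` pair:

```python
import mapnik

lonlat = mapnik.Projection("epsg:4326")    # any valid PROJ string
merc = mapnik.Projection("epsg:3857")
tr = mapnik.ProjTransform(lonlat, merc)

pt = tr.forward(mapnik.Coord(13.4, 52.5))  # source -> destination
back = tr.backward(pt)                     # destination -> source
print(tr.definition())                     # newly exposed via .def("definition", ...)
```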
initialized.\n" - ) - ) - .def_pickle(projection_pickle_suite()) - .def ("params", make_function(&projection::params, - return_value_policy()), - "Returns the PROJ.4 string for this projection.\n") - .def ("expanded",&projection::expanded, - "normalize PROJ.4 definition by expanding +init= syntax\n") - .add_property ("geographic", &projection::is_geographic, - "This property is True if the projection is a geographic projection\n" - "(i.e. it uses lon/lat coordinates)\n") + py::class_(m, "Projection", "Represents a map projection.") + .def(py::init(), + "Constructs a new projection from its PROJ string representation.\n" + "\n" + "The constructor will throw a RuntimeError in case the projection\n" + "cannot be initialized.\n", + py::arg("proj_string") + ) + .def(py::pickle( + [] (projection const& p) { // __getstate__ + return py::make_tuple(p.params()); + }, + [] (py::tuple t) { // __setstate__ + if (t.size() != 1) + throw std::runtime_error("Invalid state!"); + projection p(t[0].cast()); + return p; + })) + .def("params", &projection::params, + "Returns the PROJ string for this projection.\n") + .def("definition",&projection::definition, + "Return projection definition\n") + .def("description", &projection::description, + "Returns projection description") + .def_property_readonly("geographic", &projection::is_geographic, + "This property is True if the projection is a geographic projection\n" + "(i.e. it uses lon/lat coordinates)\n") + .def_property_readonly("area_of_use", &projection::area_of_use, + "This property returns projection area of use in lonlat WGS84\n") ; - def("forward_",&forward_pt); - def("inverse_",&inverse_pt); - def("forward_",&forward_env); - def("inverse_",&inverse_env); + m.def("forward_", &forward_pt); + m.def("inverse_", &inverse_pt); + m.def("forward_", &forward_env); + m.def("inverse_", &inverse_env); } diff --git a/src/mapnik_python.cpp b/src/mapnik_python.cpp index 3cc80cc23..5a9bf1326 100644 --- a/src/mapnik_python.cpp +++ b/src/mapnik_python.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,178 +20,55 @@ * *****************************************************************************/ +//mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" -#include "python_to_value.hpp" -#include // for keywords, arg, etc -#include -#include // for def -#include -#include // for none -#include // for dict -#include -#include // for list -#include // for BOOST_PYTHON_MODULE -#include // for get_managed_object -#include -#include -#pragma GCC diagnostic pop - -// stl -#include -#include - -void export_color(); -void export_coord(); -void export_layer(); -void export_parameters(); -void export_envelope(); -void export_query(); -void export_geometry(); -void export_palette(); -void export_image(); -void export_image_view(); -void export_gamma_method(); -void export_scaling_method(); -#if defined(GRID_RENDERER) -void export_grid(); -void export_grid_view(); -#endif -void export_map(); -void export_python(); -void export_expression(); -void export_rule(); -void export_style(); 
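The `Projection` binding above keeps pickling (now via `py::pickle` over `params()`) and adds `definition`, `description` and `area_of_use`. A hedged sketch; the `epsg:3857` string is an example:

```python
import pickle
import mapnik

p = mapnik.Projection("epsg:3857")
print(p.params())        # PROJ string
print(p.definition())    # normalized definition
print(p.description())   # human-readable description
print(p.geographic)      # False for a projected CRS
print(p.area_of_use)     # lon/lat WGS84 extent of validity

p2 = pickle.loads(pickle.dumps(p))   # round-trips through the __getstate__/__setstate__ hooks
```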
-void export_feature(); -void export_featureset(); -void export_fontset(); -void export_datasource(); -void export_datasource_cache(); -void export_symbolizer(); -void export_markers_symbolizer(); -void export_point_symbolizer(); -void export_line_symbolizer(); -void export_line_pattern_symbolizer(); -void export_polygon_symbolizer(); -void export_building_symbolizer(); -void export_polygon_pattern_symbolizer(); -void export_raster_symbolizer(); -void export_text_placement(); -void export_shield_symbolizer(); -void export_debug_symbolizer(); -void export_group_symbolizer(); -void export_font_engine(); -void export_projection(); -void export_proj_transform(); -void export_view_transform(); -void export_raster_colorizer(); -void export_label_collision_detector(); -void export_logger(); - #include #include +#include +#include #include #include #include -#include -#include #include -#include -#include -#include +#include +#include #include +#include +#include "mapnik_value_converter.hpp" +#include "python_to_value.hpp" + #if defined(GRID_RENDERER) #include "python_grid_utils.hpp" #endif -#include "mapnik_value_converter.hpp" -#include "mapnik_enumeration_wrapper_converter.hpp" -#include "mapnik_threads.hpp" -#include "python_optional.hpp" -#include #if defined(SHAPE_MEMORY_MAPPED_FILE) #include #endif - #if defined(SVG_RENDERER) #include #endif - -namespace mapnik { - class font_set; - class layer; - class color; - class label_collision_detector4; -} -void clear_cache() -{ - mapnik::marker_cache::instance().clear(); -#if defined(SHAPE_MEMORY_MAPPED_FILE) - mapnik::mapped_memory_cache::instance().clear(); -#endif -} - #if defined(HAVE_CAIRO) #include #include #include #endif -#if defined(HAVE_PYCAIRO) -#include -#include -#include -static Pycairo_CAPI_t *Pycairo_CAPI; -static void *extract_surface(PyObject* op) -{ - if (PyObject_TypeCheck(op, const_cast(Pycairo_CAPI->Surface_Type))) - { - return op; - } - else - { - return 0; - } -} +//stl +#include +#include -static void *extract_context(PyObject* op) -{ - if (PyObject_TypeCheck(op, const_cast(Pycairo_CAPI->Context_Type))) - { - return op; - } - else - { - return 0; - } -} +//pybind11 +#include + +namespace py = pybind11; -void register_cairo() +namespace { +void clear_cache() { -#if PY_MAJOR_VERSION >= 3 - Pycairo_CAPI = (Pycairo_CAPI_t*) PyCapsule_Import(const_cast("cairo.CAPI"), 0); -#else - Pycairo_CAPI = (Pycairo_CAPI_t*) PyCObject_Import(const_cast("cairo"), const_cast("CAPI")); + mapnik::marker_cache::instance().clear(); +#if defined(SHAPE_MEMORY_MAPPED_FILE) + mapnik::mapped_memory_cache::instance().clear(); #endif - if (Pycairo_CAPI == nullptr) return; - - boost::python::converter::registry::insert(&extract_surface, boost::python::type_id()); - boost::python::converter::registry::insert(&extract_context, boost::python::type_id()); } -#endif - -using mapnik::python_thread; -using mapnik::python_unblock_auto_block; -#ifdef MAPNIK_DEBUG -bool python_thread::thread_support = true; -#endif -boost::thread_specific_ptr python_thread::state; struct agg_renderer_visitor_1 { @@ -310,13 +187,13 @@ void render(mapnik::Map const& map, unsigned offset_x = 0u, unsigned offset_y = 0u) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::util::apply_visitor(agg_renderer_visitor_1(map, scale_factor, offset_x, offset_y), image); } void render_with_vars(mapnik::Map const& map, mapnik::image_any& image, - boost::python::dict const& d, + py::dict const& d, double scale_factor = 1.0, unsigned offset_x = 0u, unsigned offset_y = 0u) @@ 
-324,7 +201,7 @@ void render_with_vars(mapnik::Map const& map, mapnik::attributes vars = mapnik::dict2attr(d); mapnik::request req(map.width(),map.height(),map.get_current_extent()); req.set_buffer_size(map.buffer_size()); - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::util::apply_visitor(agg_renderer_visitor_3(map, req, vars, scale_factor, offset_x, offset_y), image); } @@ -336,7 +213,7 @@ void render_with_detector( unsigned offset_x = 0u, unsigned offset_y = 0u) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::util::apply_visitor(agg_renderer_visitor_2(map, detector, scale_factor, offset_x, offset_y), image); } @@ -356,7 +233,7 @@ void render_layer2(mapnik::Map const& map, throw std::runtime_error(s.str()); } - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::layer const& layer = layers[layer_idx]; std::set names; mapnik::util::apply_visitor(agg_renderer_visitor_4(map, scale_factor, offset_x, offset_y, layer, names), image); @@ -370,7 +247,7 @@ void render3(mapnik::Map const& map, unsigned offset_x = 0, unsigned offset_y = 0) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::cairo_surface_ptr surface(cairo_surface_reference(py_surface->surface), mapnik::cairo_surface_closer()); mapnik::cairo_renderer ren(map,mapnik::create_context(surface),scale_factor,offset_x,offset_y); ren.apply(); @@ -378,7 +255,7 @@ void render3(mapnik::Map const& map, void render4(mapnik::Map const& map, PycairoSurface* py_surface) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::cairo_surface_ptr surface(cairo_surface_reference(py_surface->surface), mapnik::cairo_surface_closer()); mapnik::cairo_renderer ren(map,mapnik::create_context(surface)); ren.apply(); @@ -390,7 +267,7 @@ void render5(mapnik::Map const& map, unsigned offset_x = 0, unsigned offset_y = 0) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::cairo_ptr context(cairo_reference(py_context->ctx), mapnik::cairo_closer()); mapnik::cairo_renderer ren(map,context,scale_factor,offset_x, offset_y); ren.apply(); @@ -398,7 +275,7 @@ void render5(mapnik::Map const& map, void render6(mapnik::Map const& map, PycairoContext* py_context) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::cairo_ptr context(cairo_reference(py_context->ctx), mapnik::cairo_closer()); mapnik::cairo_renderer ren(map,context); ren.apply(); @@ -408,7 +285,7 @@ void render_with_detector2( PycairoContext* py_context, std::shared_ptr detector) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::cairo_ptr context(cairo_reference(py_context->ctx), mapnik::cairo_closer()); mapnik::cairo_renderer ren(map,context,detector); ren.apply(); @@ -422,7 +299,7 @@ void render_with_detector3( unsigned offset_x = 0u, unsigned offset_y = 0u) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::cairo_ptr context(cairo_reference(py_context->ctx), mapnik::cairo_closer()); mapnik::cairo_renderer ren(map,context,detector,scale_factor,offset_x,offset_y); ren.apply(); @@ -433,7 +310,7 @@ void render_with_detector4( PycairoSurface* py_surface, std::shared_ptr detector) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::cairo_surface_ptr surface(cairo_surface_reference(py_surface->surface), mapnik::cairo_surface_closer()); mapnik::cairo_renderer ren(map, mapnik::create_context(surface), detector); ren.apply(); @@ -447,7 +324,7 @@ void render_with_detector5( unsigned offset_x = 
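These hunks swap the old `python_unblock_auto_block` for `py::gil_scoped_release`, so the render entry points still drop the GIL. A speculative sketch of what that enables on the Python side (the map file and image size are placeholders):

```python
import threading
import mapnik

m = mapnik.Map(256, 256)
mapnik.load_map(m, "mapfile.xml")        # placeholder stylesheet
im = mapnik.Image(m.width, m.height)

# rendering releases the GIL, so other Python threads keep running meanwhile
t = threading.Thread(target=mapnik.render, args=(m, im))
t.start()
t.join()
```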
0u, unsigned offset_y = 0u) { - python_unblock_auto_block b; + py::gil_scoped_release release; mapnik::cairo_surface_ptr surface(cairo_surface_reference(py_surface->surface), mapnik::cairo_surface_closer()); mapnik::cairo_renderer ren(map, mapnik::create_context(surface), detector, scale_factor, offset_x, offset_y); ren.apply(); @@ -525,8 +402,7 @@ void render_to_file2(mapnik::Map const& map,std::string const& filename) void render_to_file3(mapnik::Map const& map, std::string const& filename, std::string const& format, - double scale_factor = 1.0 - ) + double scale_factor = 1.0) { if (format == "svg-ng") { @@ -586,6 +462,31 @@ void standard_error_translator(std::exception const & ex) PyErr_SetString(PyExc_RuntimeError, ex.what()); } +// indicator for pycairo support in the python bindings +bool has_pycairo() +{ +#if defined(HAVE_CAIRO) && defined(HAVE_PYCAIRO) +#if PY_MAJOR_VERSION >= 3 + Pycairo_CAPI = (Pycairo_CAPI_t*) PyCapsule_Import(const_cast("cairo.CAPI"), 0); +#else + Pycairo_CAPI = (Pycairo_CAPI_t*) PyCObject_Import(const_cast("cairo"), const_cast("CAPI")); +#endif + if (Pycairo_CAPI == nullptr){ + /* + Case where pycairo support has been compiled into + mapnik but at runtime the cairo python module + is unable to be imported and therefore Pycairo surfaces + and contexts cannot be passed to mapnik.render() + */ + return false; + } + return true; +#else + return false; +#endif +} + + unsigned mapnik_version() { return MAPNIK_VERSION; @@ -596,9 +497,9 @@ std::string mapnik_version_string() return MAPNIK_VERSION_STRING; } -bool has_proj4() +bool has_proj() { -#if defined(MAPNIK_USE_PROJ4) +#if defined(MAPNIK_USE_PROJ) return true; #else return false; @@ -623,7 +524,7 @@ bool has_grid_renderer() #endif } -bool has_jpeg() +constexpr bool has_jpeg() { #if defined(HAVE_JPEG) return true; @@ -632,7 +533,7 @@ bool has_jpeg() #endif } -bool has_png() +constexpr bool has_png() { #if defined(HAVE_PNG) return true; @@ -655,8 +556,8 @@ bool has_webp() #if defined(HAVE_WEBP) return true; #else - return false; -#endif + return false; + #endif } // indicator for cairo rendering support inside libmapnik @@ -669,226 +570,154 @@ bool has_cairo() #endif } -// indicator for pycairo support in the python bindings -bool has_pycairo() -{ -#if defined(HAVE_CAIRO) && defined(HAVE_PYCAIRO) -#if PY_MAJOR_VERSION >= 3 - Pycairo_CAPI = (Pycairo_CAPI_t*) PyCapsule_Import(const_cast("cairo.CAPI"), 0); -#else - Pycairo_CAPI = (Pycairo_CAPI_t*) PyCObject_Import(const_cast("cairo"), const_cast("CAPI")); -#endif - if (Pycairo_CAPI == nullptr){ - /* - Case where pycairo support has been compiled into - mapnik but at runtime the cairo python module - is unable to be imported and therefore Pycairo surfaces - and contexts cannot be passed to mapnik.render() - */ - return false; - } - return true; -#else - return false; -#endif -} - - -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -BOOST_PYTHON_FUNCTION_OVERLOADS(load_map_overloads, load_map, 2, 4) -BOOST_PYTHON_FUNCTION_OVERLOADS(load_map_string_overloads, load_map_string, 2, 4) -BOOST_PYTHON_FUNCTION_OVERLOADS(save_map_overloads, save_map, 2, 3) -BOOST_PYTHON_FUNCTION_OVERLOADS(save_map_to_string_overloads, save_map_to_string, 1, 2) -BOOST_PYTHON_FUNCTION_OVERLOADS(render_overloads, render, 2, 5) -BOOST_PYTHON_FUNCTION_OVERLOADS(render_with_detector_overloads, render_with_detector, 3, 6) -#pragma GCC diagnostic pop - -BOOST_PYTHON_MODULE(_mapnik) -{ - - using namespace boost::python; - - using mapnik::load_map; - using 
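The feature probes above were renamed (`has_proj4` becomes `has_proj`; `mapnik_version` is exposed as `version` further down in this file). A quick introspection sketch, assuming the pure-Python wrapper re-exports these `_mapnik` functions unchanged:

```python
import mapnik

print(mapnik.version(), mapnik.version_string())
print("proj:", mapnik.has_proj())                 # formerly has_proj4
print("cairo:", mapnik.has_cairo(), "pycairo:", mapnik.has_pycairo())
print("png:", mapnik.has_png(), "jpeg:", mapnik.has_jpeg(), "webp:", mapnik.has_webp())
print("grid:", mapnik.has_grid_renderer(), "svg:", mapnik.has_svg_renderer())
```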
mapnik::load_map_string; - using mapnik::save_map; - using mapnik::save_map_to_string; - - register_exception_translator(&standard_error_translator); - register_exception_translator(&out_of_range_error_translator); - register_exception_translator(&value_error_translator); - register_exception_translator(&runtime_error_translator); -#if defined(HAVE_CAIRO) && defined(HAVE_PYCAIRO) - register_cairo(); -#endif - export_query(); - export_geometry(); - export_feature(); - export_featureset(); - export_fontset(); - export_datasource(); - export_parameters(); - export_color(); - export_envelope(); - export_palette(); - export_image(); - export_image_view(); - export_gamma_method(); - export_scaling_method(); +} // namespace + + +void export_color(py::module const&); +void export_composite_modes(py::module const&); +void export_coord(py::module const&); +void export_envelope(py::module const&); +void export_gamma_method(py::module const&); +void export_geometry(py::module const&); +void export_feature(py::module const&); +void export_featureset(py::module const&); +void export_font_engine(py::module const&); +void export_fontset(py::module const&); +void export_expression(py::module const&); +void export_datasource(py::module&); // non-const because of m.def(..) +void export_datasource_cache(py::module const&); #if defined(GRID_RENDERER) - export_grid(); - export_grid_view(); +void export_grid(py::module const&); +void export_grid_view(py::module const&); #endif - export_expression(); - export_rule(); - export_style(); - export_layer(); - export_datasource_cache(); - export_symbolizer(); - export_markers_symbolizer(); - export_point_symbolizer(); - export_line_symbolizer(); - export_line_pattern_symbolizer(); - export_polygon_symbolizer(); - export_building_symbolizer(); - export_polygon_pattern_symbolizer(); - export_raster_symbolizer(); - export_text_placement(); - export_shield_symbolizer(); - export_debug_symbolizer(); - export_group_symbolizer(); - export_font_engine(); - export_projection(); - export_proj_transform(); - export_view_transform(); - export_coord(); - export_map(); - export_raster_colorizer(); - export_label_collision_detector(); - export_logger(); - - def("clear_cache", &clear_cache, - "\n" - "Clear all global caches of markers and mapped memory regions.\n" - "\n" - "Usage:\n" - ">>> from mapnik import clear_cache\n" - ">>> clear_cache()\n" - ); - - def("render_to_file",&render_to_file1, - "\n" - "Render Map to file using explicit image type.\n" - "\n" - "Usage:\n" - ">>> from mapnik import Map, render_to_file, load_map\n" - ">>> m = Map(256,256)\n" - ">>> load_map(m,'mapfile.xml')\n" - ">>> render_to_file(m,'image32bit.png','png')\n" - "\n" - "8 bit (paletted) PNG can be requested with 'png256':\n" - ">>> render_to_file(m,'8bit_image.png','png256')\n" - "\n" - "JPEG quality can be controlled by adding a suffix to\n" - "'jpeg' between 0 and 100 (default is 85):\n" - ">>> render_to_file(m,'top_quality.jpeg','jpeg100')\n" - ">>> render_to_file(m,'medium_quality.jpeg','jpeg50')\n" - ); - - def("render_to_file",&render_to_file2, - "\n" - "Render Map to file (type taken from file extension)\n" - "\n" - "Usage:\n" - ">>> from mapnik import Map, render_to_file, load_map\n" - ">>> m = Map(256,256)\n" - ">>> render_to_file(m,'image.jpeg')\n" - "\n" - ); - - def("render_to_file",&render_to_file3, - "\n" - "Render Map to file using explicit image type and scale factor.\n" - "\n" - "Usage:\n" - ">>> from mapnik import Map, render_to_file, load_map\n" - ">>> m = Map(256,256)\n" - ">>> 
scale_factor = 4\n" - ">>> render_to_file(m,'image.jpeg',scale_factor)\n" - "\n" - ); - - def("render_tile_to_file",&render_tile_to_file, - "\n" - "TODO\n" - "\n" - ); - - def("render_with_vars",&render_with_vars, - (arg("map"), - arg("image"), - arg("vars"), - arg("scale_factor")=1.0, - arg("offset_x")=0, - arg("offset_y")=0 - ) - ); - - def("render", &render, render_overloads( - "\n" - "Render Map to an AGG image_any using offsets\n" - "\n" - "Usage:\n" - ">>> from mapnik import Map, Image, render, load_map\n" - ">>> m = Map(256,256)\n" - ">>> load_map(m,'mapfile.xml')\n" - ">>> im = Image(m.width,m.height)\n" - ">>> scale_factor=2.0\n" - ">>> offset = [100,50]\n" - ">>> render(m,im)\n" - ">>> render(m,im,scale_factor)\n" - ">>> render(m,im,scale_factor,offset[0],offset[1])\n" - "\n" - )); - - def("render_with_detector", &render_with_detector, render_with_detector_overloads( - "\n" - "Render Map to an AGG image_any using a pre-constructed detector.\n" - "\n" - "Usage:\n" - ">>> from mapnik import Map, Image, LabelCollisionDetector, render_with_detector, load_map\n" - ">>> m = Map(256,256)\n" - ">>> load_map(m,'mapfile.xml')\n" - ">>> im = Image(m.width,m.height)\n" - ">>> detector = LabelCollisionDetector(m)\n" - ">>> render_with_detector(m, im, detector)\n" - )); - - def("render_layer", &render_layer2, - (arg("map"), - arg("image"), - arg("layer"), - arg("scale_factor")=1.0, - arg("offset_x")=0, - arg("offset_y")=0 - ) - ); - +void export_image(py::module const&); +void export_image_view(py::module const&); +void export_layer(py::module const&); +void export_map(py::module const&); +void export_projection(py::module&); // non-const because of m.def(..) +void export_proj_transform(py::module const&); +void export_query(py::module const&); +void export_rule(py::module const&); +void export_symbolizer(py::module const&); +void export_polygon_symbolizer(py::module const&); +void export_line_symbolizer(py::module const&); +void export_point_symbolizer(py::module const&); +void export_style(py::module const&); +void export_logger(py::module const&); +void export_placement_finder(py::module const&); +void export_text_symbolizer(py::module const&); +void export_debug_symbolizer(py::module const&); +void export_markers_symbolizer(py::module const&); +void export_polygon_pattern_symbolizer(py::module const&); +void export_line_pattern_symbolizer(py::module const&); +void export_raster_symbolizer(py::module const&); +void export_palette(py::module const&); +void export_parameters(py::module const&); +void export_raster_colorizer(py::module const&); +void export_scaling_method(py::module const&); +void export_label_collision_detector(py::module const& m); +void export_dot_symbolizer(py::module const&); +void export_shield_symbolizer(py::module const&); +void export_group_symbolizer(py::module const&); +void export_building_symbolizer(py::module const&); + +using mapnik::load_map; +using mapnik::load_map_string; +using mapnik::save_map; +using mapnik::save_map_to_string; + + +PYBIND11_MODULE(_mapnik, m) { + export_color(m); + export_composite_modes(m); + export_coord(m); + export_envelope(m); + export_geometry(m); + export_gamma_method(m); + export_feature(m); + export_featureset(m); + export_font_engine(m); + export_fontset(m); + export_expression(m); + export_datasource(m); + export_datasource_cache(m); #if defined(GRID_RENDERER) - def("render_layer", &mapnik::render_layer_for_grid, - (arg("map"), - arg("grid"), - arg("layer"), - arg("fields")=boost::python::list(), - arg("scale_factor")=1.0, - 
arg("offset_x")=0, - arg("offset_y")=0 - ) - ); + export_grid(m); + export_grid_view(m); #endif + export_image(m); + export_image_view(m); + export_layer(m); + export_map(m); + export_projection(m); + export_proj_transform(m); + export_query(m); + export_rule(m); + export_symbolizer(m); + export_polygon_symbolizer(m); + export_line_symbolizer(m); + export_point_symbolizer(m); + export_style(m); + export_logger(m); + export_placement_finder(m); + export_text_symbolizer(m); + export_palette(m); + export_parameters(m); + export_debug_symbolizer(m); + export_markers_symbolizer(m); + export_polygon_pattern_symbolizer(m); + export_line_pattern_symbolizer(m); + export_raster_symbolizer(m); + export_raster_colorizer(m); + export_scaling_method(m); + export_label_collision_detector(m); + export_dot_symbolizer(m); + export_shield_symbolizer(m); + export_group_symbolizer(m); + export_building_symbolizer(m); + + // + m.def("version", &mapnik_version,"Get the Mapnik version number"); + m.def("version_string", &mapnik_version_string,"Get the Mapnik version string"); + m.def("has_proj", &has_proj, "Get proj status"); + m.def("has_jpeg", &has_jpeg, "Get jpeg read/write support status"); + m.def("has_png", &has_png, "Get png read/write support status"); + m.def("has_tiff", &has_tiff, "Get tiff read/write support status"); + m.def("has_webp", &has_webp, "Get webp read/write support status"); + m.def("has_svg_renderer", &has_svg_renderer, "Get svg_renderer status"); + m.def("has_grid_renderer", &has_grid_renderer, "Get grid_renderer status"); + m.def("has_cairo", &has_cairo, "Get cairo library status"); + + m.def("load_map", &load_map, + py::arg("Map"), + py::arg("filename"), + py::arg("strict")=false, + py::arg("base_path") = "" ); + + m.def("load_map_from_string", &load_map_string, + py::arg("Map"), + py::arg("str"), + py::arg("strict")=false, + py::arg("base_path") = "" ); + + // render + m.def("render", &render, + py::arg("Map"), + py::arg("image"), + py::arg("scale_factor") = 1.0, + py::arg("offset_x") = 0, + py::arg("offset_y") = 0); + + m.def("render_with_detector", &render_with_detector, + py::arg("Map"), + py::arg("image"), + py::arg("detector"), + py::arg("scale_factor") = 1.0, + py::arg("offset_x") = 0, + py::arg("offset_y") = 0); #if defined(HAVE_CAIRO) && defined(HAVE_PYCAIRO) - def("render",&render3, + m.def("render",&render3, "\n" "Render Map to Cairo Surface using offsets\n" "\n" @@ -902,7 +731,7 @@ BOOST_PYTHON_MODULE(_mapnik) "\n" ); - def("render",&render4, + m.def("render",&render4, "\n" "Render Map to Cairo Surface\n" "\n" @@ -916,7 +745,7 @@ BOOST_PYTHON_MODULE(_mapnik) "\n" ); - def("render",&render5, + m.def("render",&render5, "\n" "Render Map to Cairo Context using offsets\n" "\n" @@ -930,7 +759,7 @@ BOOST_PYTHON_MODULE(_mapnik) "\n" ); - def("render",&render6, + m.def("render",&render6, "\n" "Render Map to Cairo Context\n" "\n" @@ -944,7 +773,7 @@ BOOST_PYTHON_MODULE(_mapnik) "\n" ); - def("render_with_detector", &render_with_detector2, + m.def("render_with_detector", &render_with_detector2, "\n" "Render Map to Cairo Context using a pre-constructed detector.\n" "\n" @@ -959,7 +788,7 @@ BOOST_PYTHON_MODULE(_mapnik) ">>> render_with_detector(m, ctx, detector)\n" ); - def("render_with_detector", &render_with_detector3, + m.def("render_with_detector", &render_with_detector3, "\n" "Render Map to Cairo Context using a pre-constructed detector, scale and offsets.\n" "\n" @@ -974,7 +803,7 @@ BOOST_PYTHON_MODULE(_mapnik) ">>> render_with_detector(m, ctx, detector, 1, 1, 1)\n" ); - 
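With the boost overload macros gone, `load_map`, `render` and `render_with_detector` now declare explicit `py::arg` defaults, so keyword calls work directly. A sketch using the argument names declared above (the map file is a placeholder):

```python
import mapnik

m = mapnik.Map(256, 256)
mapnik.load_map(m, "mapfile.xml", strict=False)   # base_path also defaults to ""
im = mapnik.Image(m.width, m.height)

mapnik.render(m, im, scale_factor=2.0, offset_x=0, offset_y=0)

detector = mapnik.LabelCollisionDetector(m)       # per the render_with_detector docstrings
mapnik.render_with_detector(m, im, detector, scale_factor=1.0)
```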
def("render_with_detector", &render_with_detector4, + m.def("render_with_detector", &render_with_detector4, "\n" "Render Map to Cairo Surface using a pre-constructed detector.\n" "\n" @@ -988,7 +817,7 @@ BOOST_PYTHON_MODULE(_mapnik) ">>> render_with_detector(m, surface, detector)\n" ); - def("render_with_detector", &render_with_detector5, + m.def("render_with_detector", &render_with_detector5, "\n" "Render Map to Cairo Surface using a pre-constructed detector, scale and offsets.\n" "\n" @@ -1001,73 +830,384 @@ BOOST_PYTHON_MODULE(_mapnik) ">>> detector = LabelCollisionDetector(m)\n" ">>> render_with_detector(m, surface, detector, 1, 1, 1)\n" ); - #endif - def("scale_denominator", &scale_denominator, - (arg("map"),arg("is_geographic")), - "\n" - "Return the Map Scale Denominator.\n" - "Also available as Map.scale_denominator()\n" - "\n" - "Usage:\n" - "\n" - ">>> from mapnik import Map, Projection, scale_denominator, load_map\n" - ">>> m = Map(256,256)\n" - ">>> load_map(m,'mapfile.xml')\n" - ">>> scale_denominator(m,Projection(m.srs).geographic)\n" - "\n" + m.def("render_layer", &render_layer2, + py::arg("map"), + py::arg("image"), + py::arg("layer"), + py::arg("scale_factor")=1.0, + py::arg("offset_x")=0, + py::arg("offset_y")=0 ); - def("load_map", &load_map, load_map_overloads()); - - def("load_map_from_string", &load_map_string, load_map_string_overloads()); - - def("save_map", &save_map, save_map_overloads()); -/* - "\n" - "Save Map object to XML file\n" - "\n" - "Usage:\n" - ">>> from mapnik import Map, load_map, save_map\n" - ">>> m = Map(256,256)\n" - ">>> load_map(m,'mapfile_wgs84.xml')\n" - ">>> m.srs\n" - "'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'\n" - ">>> m.srs = '+init=espg:3395'\n" - ">>> save_map(m,'mapfile_mercator.xml')\n" - "\n" - ); -*/ - - def("save_map_to_string", &save_map_to_string, save_map_to_string_overloads()); - def("mapnik_version", &mapnik_version,"Get the Mapnik version number"); - def("mapnik_version_string", &mapnik_version_string,"Get the Mapnik version string"); - def("has_proj4", &has_proj4, "Get proj4 status"); - def("has_jpeg", &has_jpeg, "Get jpeg read/write support status"); - def("has_png", &has_png, "Get png read/write support status"); - def("has_tiff", &has_tiff, "Get tiff read/write support status"); - def("has_webp", &has_webp, "Get webp read/write support status"); - def("has_svg_renderer", &has_svg_renderer, "Get svg_renderer status"); - def("has_grid_renderer", &has_grid_renderer, "Get grid_renderer status"); - def("has_cairo", &has_cairo, "Get cairo library status"); - def("has_pycairo", &has_pycairo, "Get pycairo module status"); - - python_optional(); - python_optional(); - python_optional >(); - python_optional(); - python_optional(); - python_optional(); - python_optional(); - python_optional(); - python_optional(); - python_optional(); - python_optional(); - python_optional(); - register_ptr_to_python(); - register_ptr_to_python(); - to_python_converter(); - to_python_converter(); - to_python_converter(); +#if defined(GRID_RENDERER) + m.def("render_layer", &mapnik::render_layer_for_grid, + py::arg("map"), + py::arg("grid"), + py::arg("layer"), + py::arg("fields") = py::list(), + py::arg("scale_factor")=1.0, + py::arg("offset_x")=0, + py::arg("offset_y")=0); +#endif + + // save + m.def("save_map", &save_map, + py::arg("Map"), + py::arg("filename"), + py::arg("explicit_defaults") = false); + + m.def("save_map_to_string", &save_map_to_string, + py::arg("Map"), + py::arg("explicit_defaults") = false); + + m.def("clear_cache", 
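Same pattern for the layer and save helpers bound above. Going by the `layers[layer_idx]` lookup in `render_layer2`, the `layer` argument is an index into the map's layer list rather than a layer object; the filenames are placeholders:

```python
import mapnik

m = mapnik.Map(256, 256)
mapnik.load_map(m, "mapfile.xml")
im = mapnik.Image(m.width, m.height)

mapnik.render_layer(m, im, 0, scale_factor=1.0)   # render only the first layer

mapnik.save_map(m, "copy.xml", explicit_defaults=False)
xml = mapnik.save_map_to_string(m)
```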
&clear_cache, + "\n" + "Clear all global caches of markers and mapped memory regions.\n" + "\n" + "Usage:\n" + ">>> from mapnik import clear_cache\n" + ">>> clear_cache()\n"); + + + m.def("render_to_file",&render_to_file1, + "\n" + "Render Map to file using explicit image type.\n" + "\n" + "Usage:\n" + ">>> from mapnik import Map, render_to_file, load_map\n" + ">>> m = Map(256,256)\n" + ">>> load_map(m,'mapfile.xml')\n" + ">>> render_to_file(m,'image32bit.png','png')\n" + "\n" + "8 bit (paletted) PNG can be requested with 'png256':\n" + ">>> render_to_file(m,'8bit_image.png','png256')\n" + "\n" + "JPEG quality can be controlled by adding a suffix to\n" + "'jpeg' between 0 and 100 (default is 85):\n" + ">>> render_to_file(m,'top_quality.jpeg','jpeg100')\n" + ">>> render_to_file(m,'medium_quality.jpeg','jpeg50')\n"); + + m.def("render_to_file",&render_to_file2, + "\n" + "Render Map to file (type taken from file extension)\n" + "\n" + "Usage:\n" + ">>> from mapnik import Map, render_to_file, load_map\n" + ">>> m = Map(256,256)\n" + ">>> render_to_file(m,'image.jpeg')\n" + "\n"); + + m.def("render_to_file",&render_to_file3, + "\n" + "Render Map to file using explicit image type and scale factor.\n" + "\n" + "Usage:\n" + ">>> from mapnik import Map, render_to_file, load_map\n" + ">>> m = Map(256,256)\n" + ">>> scale_factor = 4\n" + ">>> render_to_file(m,'image.jpeg',scale_factor)\n" + "\n"); + + m.def("has_pycairo", &has_pycairo, "Get pycairo module status"); } + +// // stl +// #include +// #include + +// void export_color(); +// void export_composite_modes(); +// void export_coord(); +// void export_layer(); +// void export_parameters(); +// void export_envelope(); +// void export_query(); +// void export_geometry(); +// void export_palette(); +// void export_image(); +// void export_image_view(); +// void export_gamma_method(); +// void export_scaling_method(); +// #if defined(GRID_RENDERER) +// void export_grid(); +// void export_grid_view(); +// #endif +// void export_map(); +// void export_python(); +// void export_expression(); +// void export_rule(); +// void export_style(); +// void export_feature(); +// void export_featureset(); +// void export_fontset(); +// void export_datasource(); +// void export_datasource_cache(); +// void export_symbolizer(); +// void export_markers_symbolizer(); +// void export_point_symbolizer(); +// void export_line_symbolizer(); +// void export_line_pattern_symbolizer(); +// void export_polygon_symbolizer(); +// void export_building_symbolizer(); +// void export_placement_finder(); +// void export_polygon_pattern_symbolizer(); +// void export_raster_symbolizer(); +// void export_text_symbolizer(); +// void export_shield_symbolizer(); +// void export_debug_symbolizer(); +// void export_group_symbolizer(); +// void export_font_engine(); +// void export_projection(); +// void export_proj_transform(); +// void export_view_transform(); +// void export_raster_colorizer(); +// void export_label_collision_detector(); +// void export_logger(); + +// #include +// #include +// #include +// #include +// #include +// #include +// #include +// #include +// #include +// #include +// #include +// #include +// #include +// #if defined(GRID_RENDERER) +// #include "python_grid_utils.hpp" +// #endif +//#include "mapnik_value_converter.hpp" +// #include "mapnik_enumeration_wrapper_converter.hpp" +//#include "mapnik_threads.hpp" +// #include "python_optional.hpp" +// #include +// #if defined(SHAPE_MEMORY_MAPPED_FILE) +// #include +// #endif + +// #if defined(SVG_RENDERER) +// 
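One clarification worth adding to the `render_to_file` docstrings above: the scale-factor overload (`render_to_file3`) takes the format as its third argument, i.e. `(map, path, format, scale_factor)`, even though the usage snippet shows only three arguments. A corrected sketch with placeholder filenames:

```python
import mapnik

m = mapnik.Map(256, 256)
mapnik.load_map(m, "mapfile.xml")

mapnik.render_to_file(m, "image.png", "png")          # explicit format
mapnik.render_to_file(m, "image@4x.png", "png", 4.0)  # explicit format plus scale factor

mapnik.clear_cache()   # drop cached markers / mapped memory, as documented above
```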
#include +// #endif + +// namespace mapnik { +// class font_set; +// class layer; +// class color; +// class label_collision_detector4; +// } + +// #if defined(HAVE_CAIRO) +// #include +// #include +// #include +// #endif + +// #if defined(HAVE_PYCAIRO) +// #include +// #include +// #if PY_MAJOR_VERSION >= 3 +// #include +// #else +// #include +// static Pycairo_CAPI_t *Pycairo_CAPI; +// #endif + +// static void *extract_surface(PyObject* op) +// { +// if (PyObject_TypeCheck(op, const_cast(Pycairo_CAPI->Surface_Type))) +// { +// return op; +// } +// else +// { +// return 0; +// } +// } + +// static void *extract_context(PyObject* op) +// { +// if (PyObject_TypeCheck(op, const_cast(Pycairo_CAPI->Context_Type))) +// { +// return op; +// } +// else +// { +// return 0; +// } +// } + +// void register_cairo() +// { +// #if PY_MAJOR_VERSION >= 3 +// Pycairo_CAPI = (Pycairo_CAPI_t*) PyCapsule_Import(const_cast("cairo.CAPI"), 0); +// #else +// Pycairo_CAPI = (Pycairo_CAPI_t*) PyCObject_Import(const_cast("cairo"), const_cast("CAPI")); +// #endif +// if (Pycairo_CAPI == nullptr) return; + +// boost::python::converter::registry::insert(&extract_surface, boost::python::type_id()); +// boost::python::converter::registry::insert(&extract_context, boost::python::type_id()); +// } +// #endif + + + +// #pragma GCC diagnostic push +// #pragma GCC diagnostic ignored "-Wunused-local-typedef" +// BOOST_PYTHON_FUNCTION_OVERLOADS(load_map_overloads, load_map, 2, 4) +// BOOST_PYTHON_FUNCTION_OVERLOADS(load_map_string_overloads, load_map_string, 2, 4) +// BOOST_PYTHON_FUNCTION_OVERLOADS(save_map_overloads, save_map, 2, 3) +// BOOST_PYTHON_FUNCTION_OVERLOADS(save_map_to_string_overloads, save_map_to_string, 1, 2) +// BOOST_PYTHON_FUNCTION_OVERLOADS(render_overloads, render, 2, 5) +// BOOST_PYTHON_FUNCTION_OVERLOADS(render_with_detector_overloads, render_with_detector, 3, 6) +// #pragma GCC diagnostic pop + +// BOOST_PYTHON_MODULE(_mapnik) +// { + +// using namespace boost::python; + +// using mapnik::load_map; +// using mapnik::load_map_string; +// using mapnik::save_map; +// using mapnik::save_map_to_string; + +// register_exception_translator(&standard_error_translator); +// register_exception_translator(&out_of_range_error_translator); +// register_exception_translator(&value_error_translator); +// register_exception_translator(&runtime_error_translator); +// #if defined(HAVE_CAIRO) && defined(HAVE_PYCAIRO) +// register_cairo(); +// #endif +// export_query(); +// export_geometry(); +// export_feature(); +// export_featureset(); +// export_fontset(); +// export_datasource(); +// export_parameters(); +// export_color(); +// export_composite_modes(); +// export_envelope(); +// export_palette(); +// export_image(); +// export_image_view(); +// export_gamma_method(); +// export_scaling_method(); +// #if defined(GRID_RENDERER) +// export_grid(); +// export_grid_view(); +// #endif +// export_expression(); +// export_rule(); +// export_style(); +// export_layer(); +// export_datasource_cache(); +// export_symbolizer(); +// export_markers_symbolizer(); +// export_point_symbolizer(); +// export_line_symbolizer(); +// export_line_pattern_symbolizer(); +// export_polygon_symbolizer(); +// export_building_symbolizer(); +// export_placement_finder(); +// export_polygon_pattern_symbolizer(); +// export_raster_symbolizer(); +// export_text_symbolizer(); +// export_shield_symbolizer(); +// export_debug_symbolizer(); +// export_group_symbolizer(); +// export_font_engine(); +// export_projection(); +// export_proj_transform(); +// 
export_view_transform(); +// export_coord(); +// export_map(); +// export_raster_colorizer(); +// export_label_collision_detector(); +// export_logger(); + + +// def("render_tile_to_file",&render_tile_to_file, +// "\n" +// "TODO\n" +// "\n" +// ); + +// def("render_with_vars",&render_with_vars, +// (arg("map"), +// arg("image"), +// arg("vars"), +// arg("scale_factor")=1.0, +// arg("offset_x")=0, +// arg("offset_y")=0 +// ) +// ); + +// def("render", &render, render_overloads( +// "\n" +// "Render Map to an AGG image_any using offsets\n" +// "\n" +// "Usage:\n" +// ">>> from mapnik import Map, Image, render, load_map\n" +// ">>> m = Map(256,256)\n" +// ">>> load_map(m,'mapfile.xml')\n" +// ">>> im = Image(m.width,m.height)\n" +// ">>> scale_factor=2.0\n" +// ">>> offset = [100,50]\n" +// ">>> render(m,im)\n" +// ">>> render(m,im,scale_factor)\n" +// ">>> render(m,im,scale_factor,offset[0],offset[1])\n" +// "\n" +// )); + +// def("render_with_detector", &render_with_detector, render_with_detector_overloads( +// "\n" +// "Render Map to an AGG image_any using a pre-constructed detector.\n" +// "\n" +// "Usage:\n" +// ">>> from mapnik import Map, Image, LabelCollisionDetector, render_with_detector, load_map\n" +// ">>> m = Map(256,256)\n" +// ">>> load_map(m,'mapfile.xml')\n" +// ">>> im = Image(m.width,m.height)\n" +// ">>> detector = LabelCollisionDetector(m)\n" +// ">>> render_with_detector(m, im, detector)\n" +// )); +// def("save_map_to_string", &save_map_to_string, save_map_to_string_overloads()); +// def("mapnik_version", &mapnik_version,"Get the Mapnik version number"); +// def("mapnik_version_string", &mapnik_version_string,"Get the Mapnik version string"); +// def("has_proj", &has_proj, "Get proj status"); +// def("has_jpeg", &has_jpeg, "Get jpeg read/write support status"); +// def("has_png", &has_png, "Get png read/write support status"); +// def("has_tiff", &has_tiff, "Get tiff read/write support status"); +// def("has_webp", &has_webp, "Get webp read/write support status"); +// def("has_svg_renderer", &has_svg_renderer, "Get svg_renderer status"); +// def("has_grid_renderer", &has_grid_renderer, "Get grid_renderer status"); +// def("has_cairo", &has_cairo, "Get cairo library status"); +// def("has_pycairo", &has_pycairo, "Get pycairo module status"); + +// python_optional(); +// python_optional(); +// python_optional >(); +// python_optional(); +// python_optional(); +// python_optional(); +// python_optional(); +// python_optional(); +// python_optional(); +// python_optional(); +// python_optional(); +// python_optional(); +// register_ptr_to_python(); +// register_ptr_to_python(); + +// to_python_converter(); +// to_python_converter(); +// to_python_converter(); +// } diff --git a/src/mapnik_query.cpp b/src/mapnik_query.cpp index 8d77eef5f..cc32e5c6d 100644 --- a/src/mapnik_query.cpp +++ b/src/mapnik_query.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,89 +20,59 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC 
diagnostic ignored "-Wshadow" - -#include "python_to_value.hpp" -#include -#pragma GCC diagnostic pop - // mapnik +#include #include -#include - +#include +#include "python_to_value.hpp" +#include "mapnik_value_converter.hpp" +//stl #include #include +//pybind11 +#include +#include -using mapnik::query; -using mapnik::box2d; - -namespace python = boost::python; - -struct resolution_to_tuple -{ - static PyObject* convert(query::resolution_type const& x) - { - python::object tuple(python::make_tuple(std::get<0>(x), std::get<1>(x))); - return python::incref(tuple.ptr()); - } - - static PyTypeObject const* get_pytype() - { - return &PyTuple_Type; - } -}; - -struct names_to_list -{ - static PyObject* convert(std::set const& names) - { - boost::python::list l; - for ( std::string const& name : names ) - { - l.append(name); - } - return python::incref(l.ptr()); - } - - static PyTypeObject const* get_pytype() - { - return &PyList_Type; - } -}; +namespace py = pybind11; -namespace { - - void set_variables(mapnik::query & q, boost::python::dict const& d) - { - mapnik::attributes vars = mapnik::dict2attr(d); - q.set_variables(vars); - } -} - -void export_query() +void export_query(py::module const& m) { - using namespace boost::python; - - to_python_converter (); - to_python_converter, names_to_list> (); - - class_("Query", "a spatial query data object", - init,query::resolution_type const&,double>() ) - .def(init >()) - .add_property("resolution",make_function(&query::resolution, - return_value_policy())) - .add_property("bbox", make_function(&query::get_bbox, - return_value_policy()) ) - .add_property("property_names", make_function(&query::property_names, - return_value_policy()) ) + using mapnik::query; + using mapnik::box2d; + + py::class_(m, "Query", "a spatial query data object") + .def(py::init,query::resolution_type const&, double>()) + .def(py::init>()) + .def_property_readonly("resolution", [] (query const& q) { + auto resolution = q.resolution(); + return py::make_tuple(std::get<0>(resolution), + std::get<1>(resolution)); + }) + .def_property_readonly("scale_denominator", &query::scale_denominator) + .def_property_readonly("bbox", &query::get_bbox) + .def_property_readonly("unbuffered_bbox", &query::get_unbuffered_bbox) + .def_property_readonly("property_names",[] (query const& q){ + auto names = q.property_names(); + py::list obj; + for (std::string const& name : names) + { + obj.append(name); + } + return obj; + }) .def("add_property_name", &query::add_property_name) - .def("set_variables",&set_variables); + .def_property("variables", + [] (query const& q) { + py::dict d; + for (auto kv : q.variables()) + { + d[kv.first.c_str()] = kv.second; + } + return d; + }, + [] (query& q, py::dict const& d) { + mapnik::attributes vars = mapnik::dict2attr(d); + q.set_variables(vars); + }) + ; } diff --git a/src/mapnik_raster_colorizer.cpp b/src/mapnik_raster_colorizer.cpp index a57d2d6a7..b7b733cd8 100644 --- a/src/mapnik_raster_colorizer.cpp +++ b/src/mapnik_raster_colorizer.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,23 +20,16 @@ * *****************************************************************************/ +//mapnik #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" 
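A short sketch of the `Query` binding above. The `Box2d` name comes from the envelope binding (an assumption here), and `variables` now round-trips a plain Python dict through `dict2attr`:

```python
import mapnik

bbox = mapnik.Box2d(-180, -90, 180, 90)
q = mapnik.Query(bbox)                 # or Query(bbox, (res_x, res_y), scale_denominator)
q.add_property_name("name")

print(q.bbox, q.unbuffered_bbox)
print(q.resolution)                    # returned as an (x, y) tuple
print(q.scale_denominator)
print(q.property_names)                # returned as a list

q.variables = {"zoom": 10}             # converted via dict2attr
print(q.variables)
```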
-#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - -// mapnik #include #include +//pybind11 +#include +#include +#include + +namespace py = pybind11; using mapnik::raster_colorizer; using mapnik::raster_colorizer_ptr; @@ -45,11 +38,6 @@ using mapnik::colorizer_stop; using mapnik::colorizer_stops; using mapnik::colorizer_mode_enum; using mapnik::color; -using mapnik::COLORIZER_INHERIT; -using mapnik::COLORIZER_LINEAR; -using mapnik::COLORIZER_DISCRETE; -using mapnik::COLORIZER_EXACT; - namespace { void add_stop(raster_colorizer_ptr & rc, colorizer_stop & stop) @@ -81,7 +69,7 @@ void add_stop5(raster_colorizer_ptr &rc, float v, colorizer_mode_enum m, color c rc->add_stop(stop); } -mapnik::color get_color(raster_colorizer_ptr &rc, float value) +mapnik::color get_color(raster_colorizer_ptr const&rc, float value) { unsigned rgba = rc->get_color(value); unsigned r = (rgba & 0xff); @@ -98,83 +86,80 @@ colorizer_stops const& get_stops(raster_colorizer_ptr & rc) } -void export_raster_colorizer() +void export_raster_colorizer(py::module const& m) { - using namespace boost::python; - - implicitly_convertible(); - - class_("RasterColorizer", - "A Raster Colorizer object.", - init(args("default_mode","default_color")) - ) - .def(init<>()) - .add_property("default_color", - make_function(&raster_colorizer::get_default_color, return_value_policy()), + py::class_(m, "RasterColorizer", + "A Raster Colorizer object.") + + .def(py::init(), + py::arg("default_mode"), py::arg("default_color")) + .def(py::init<>()) + .def_property("default_color", + &raster_colorizer::get_default_color, &raster_colorizer::set_default_color, "The default color for stops added without a color (mapnik.Color).\n") - .add_property("default_mode", + .def_property("default_mode", &raster_colorizer::get_default_mode_enum, &raster_colorizer::set_default_mode_enum, "The default mode (mapnik.ColorizerMode).\n" "\n" "If a stop is added without a mode, then it will inherit this default mode\n") - .add_property("stops", - make_function(get_stops,return_value_policy()), + .def_property_readonly("stops", + get_stops, "The list of stops this RasterColorizer contains\n") - .add_property("epsilon", + .def_property("epsilon", &raster_colorizer::get_epsilon, &raster_colorizer::set_epsilon, "Comparison epsilon value for exact mode\n" "\n" "When comparing values in exact mode, values need only be within epsilon to match.\n") - .def("add_stop", add_stop, - (arg("ColorizerStop")), "Add a colorizer stop to the raster colorizer.\n" "\n" "Usage:\n" ">>> colorizer = mapnik.RasterColorizer()\n" ">>> color = mapnik.Color(\"#0044cc\")\n" ">>> stop = mapnik.ColorizerStop(3, mapnik.COLORIZER_INHERIT, color)\n" - ">>> colorizer.add_stop(stop)\n" + ">>> colorizer.add_stop(stop)\n", + py::arg("ColorizerStop") ) .def("add_stop", add_stop2, - (arg("value")), + "Add a colorizer stop to the raster colorizer, using the default mode and color.\n" "\n" "Usage:\n" ">>> default_color = mapnik.Color(\"#0044cc\")\n" ">>> colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_LINEAR, default_color)\n" - ">>> colorizer.add_stop(100)\n" + ">>> colorizer.add_stop(100)\n", + py::arg("value") ) .def("add_stop", add_stop3, - (arg("value")), "Add a colorizer stop to the raster colorizer, using the default mode.\n" "\n" "Usage:\n" ">>> default_color = mapnik.Color(\"#0044cc\")\n" ">>> colorizer = 
mapnik.RasterColorizer(mapnik.COLORIZER_LINEAR, default_color)\n" - ">>> colorizer.add_stop(100, mapnik.Color(\"#123456\"))\n" + ">>> colorizer.add_stop(100, mapnik.Color(\"#123456\"))\n", + py::arg("value"), py::arg("color") ) .def("add_stop", add_stop4, - (arg("value")), "Add a colorizer stop to the raster colorizer, using the default color.\n" "\n" "Usage:\n" ">>> default_color = mapnik.Color(\"#0044cc\")\n" ">>> colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_LINEAR, default_color)\n" - ">>> colorizer.add_stop(100, mapnik.COLORIZER_EXACT)\n" + ">>> colorizer.add_stop(100, mapnik.COLORIZER_EXACT)\n", + py::arg("value"), py::arg("ColorizerMode") ) .def("add_stop", add_stop5, - (arg("value")), "Add a colorizer stop to the raster colorizer.\n" "\n" "Usage:\n" ">>> default_color = mapnik.Color(\"#0044cc\")\n" ">>> colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_LINEAR, default_color)\n" - ">>> colorizer.add_stop(100, mapnik.COLORIZER_DISCRETE, mapnik.Color(\"#112233\"))\n" + ">>> colorizer.add_stop(100, mapnik.COLORIZER_DISCRETE, mapnik.Color(\"#112233\"))\n", + py::arg("value"), py::arg("ColorizerMode"), py::arg("color") ) .def("get_color", get_color, "Get the color assigned to a certain value in raster data.\n" @@ -191,52 +176,54 @@ void export_raster_colorizer() - class_("ColorizerStops", + py::class_(m, "ColorizerStops", "A RasterColorizer's collection of ordered color stops.\n" "This class is not meant to be instantiated from python. However, " "it can be accessed at a RasterColorizer's \"stops\" attribute for " - "introspection purposes", - no_init) - .def(vector_indexing_suite()) + "introspection purposes") + .def("__iter__", [] (colorizer_stops const& stops) { + return py::make_iterator(stops.begin(), stops.end()); + }) ; - enum_("ColorizerMode") - .value("COLORIZER_INHERIT", COLORIZER_INHERIT) - .value("COLORIZER_LINEAR", COLORIZER_LINEAR) - .value("COLORIZER_DISCRETE", COLORIZER_DISCRETE) - .value("COLORIZER_EXACT", COLORIZER_EXACT) + py::native_enum(m, "ColorizerMode", "enum.Enum") + .value("COLORIZER_INHERIT", colorizer_mode_enum::COLORIZER_INHERIT) + .value("COLORIZER_LINEAR", colorizer_mode_enum::COLORIZER_LINEAR) + .value("COLORIZER_DISCRETE", colorizer_mode_enum::COLORIZER_DISCRETE) + .value("COLORIZER_EXACT", colorizer_mode_enum::COLORIZER_EXACT) .export_values() + .finalize() ; - class_("ColorizerStop",init( + py::class_(m, "ColorizerStop", "A Colorizer Stop object.\n" "Create with a value, ColorizerMode, and Color\n" "\n" "Usage:" ">>> color = mapnik.Color(\"#fff000\")\n" - ">>> stop= mapnik.ColorizerStop(42.42, mapnik.COLORIZER_LINEAR, color)\n" - )) - .add_property("color", - make_function(&colorizer_stop::get_color, return_value_policy()), + ">>> stop= mapnik.ColorizerStop(42.42, mapnik.COLORIZER_LINEAR, color)\n") + .def(py::init()) + .def_property("color", + &colorizer_stop::get_color, &colorizer_stop::set_color, "The stop color (mapnik.Color).\n") - .add_property("value", + .def_property("value", &colorizer_stop::get_value, &colorizer_stop::set_value, "The stop value.\n") - .add_property("label", - make_function(&colorizer_stop::get_label, return_value_policy()), + .def_property("label", + &colorizer_stop::get_label, &colorizer_stop::set_label, "The stop label.\n") - .add_property("mode", + .def_property("mode", &colorizer_stop::get_mode_enum, &colorizer_stop::set_mode_enum, "The stop mode (mapnik.ColorizerMode).\n" "\n" "If this is COLORIZER_INHERIT then it will inherit the default mode\n" " from the RasterColorizer it is added to.\n") - .def(self == self) 
- .def("__str__",&colorizer_stop::to_string) + .def(py::self == py::self) + .def("__str__", &colorizer_stop::to_string) ; } diff --git a/src/mapnik_raster_symbolizer.cpp b/src/mapnik_raster_symbolizer.cpp new file mode 100644 index 000000000..a4aaa5f6f --- /dev/null +++ b/src/mapnik_raster_symbolizer.cpp @@ -0,0 +1,65 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include + +namespace py = pybind11; + +void export_raster_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::raster_symbolizer; + using mapnik::scaling_method_e; + + py::class_(m, "RasterSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + .def_property("opacity", + &get_property, + &set_double_property, + "Opacity - [0..1]") + .def_property("mesh_size", + &get_property, + &set_integer_property, + "Mesh size") + .def_property("scaling", + &get_property, + &set_enum_property) + .def_property("colorizer", + &get_property, + &set_colorizer_property) + .def_property("premultiplied", + &get_property, + &set_boolean_property, + "Premultiplied - False/True") + + ; + +} diff --git a/src/mapnik_rule.cpp b/src/mapnik_rule.cpp index b31959773..16407b529 100644 --- a/src/mapnik_rule.cpp +++ b/src/mapnik_rule.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -21,24 +21,17 @@ *****************************************************************************/ #include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#pragma GCC diagnostic pop - // mapnik #include #include #include +//pybind11 +#include +#include +#include +#include + +namespace py = pybind11; using mapnik::rule; using mapnik::expr_node; @@ -57,45 +50,34 @@ using mapnik::group_symbolizer; using mapnik::symbolizer; using mapnik::to_expression_string; -void export_rule() +PYBIND11_MAKE_OPAQUE(std::vector); + +void export_rule(py::module const& m) { - using namespace boost::python; - implicitly_convertible(); - implicitly_convertible(); - 
implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); + py::bind_vector>(m, "Symbolizers", py::module_local()); - class_("Symbolizers",init<>("TODO")) - .def(vector_indexing_suite()) - ; + py::class_(m, "Rule") + .def(py::init<>(), "default constructor") + .def(py::init(), + py::arg("name"), + py::arg("min_scale_denominator")=0, + py::arg("max_scale_denominator")=std::numeric_limits::infinity()) - class_("Rule",init<>("default constructor")) - .def(init >()) - .add_property("name",make_function - (&rule::get_name, - return_value_policy()), + .def_property("name", + &rule::get_name, &rule::set_name) - .add_property("filter",make_function - (&rule::get_filter,return_value_policy()), + + .def_property("filter", + &rule::get_filter, &rule::set_filter) - .add_property("min_scale",&rule::get_min_scale,&rule::set_min_scale) - .add_property("max_scale",&rule::get_max_scale,&rule::set_max_scale) - .def("set_else",&rule::set_else) - .def("has_else",&rule::has_else_filter) - .def("set_also",&rule::set_also) - .def("has_also",&rule::has_also_filter) - .def("active",&rule::active) - .add_property("symbols",make_function - (&rule::get_symbolizers,return_value_policy())) - .add_property("copy_symbols",make_function - (&rule::get_symbolizers,return_value_policy())) + + .def_property("min_scale", &rule::get_min_scale, &rule::set_min_scale) + .def_property("max_scale", &rule::get_max_scale, &rule::set_max_scale) + .def("set_else", &rule::set_else) + .def("has_else", &rule::has_else_filter) + .def("set_also", &rule::set_also) + .def("has_also", &rule::has_also_filter) + .def("active", &rule::active) + .def_property_readonly("symbolizers", &rule::get_symbolizers) ; } diff --git a/src/mapnik_scaling_method.cpp b/src/mapnik_scaling_method.cpp index 24522bbe8..9d45f4797 100644 --- a/src/mapnik_scaling_method.cpp +++ b/src/mapnik_scaling_method.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,24 +20,17 @@ * *****************************************************************************/ - +// mapnik #include +//pybind11 +#include +#include -// boost -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#pragma GCC diagnostic pop +namespace py = pybind11; -void export_scaling_method() +void export_scaling_method(py::module const& m) { - using namespace boost::python; - - enum_("scaling_method") + py::native_enum(m, "scaling_method", "enum.IntEnum") .value("NEAR", mapnik::SCALING_NEAR) .value("BILINEAR", mapnik::SCALING_BILINEAR) .value("BICUBIC", mapnik::SCALING_BICUBIC) @@ -55,5 +48,6 @@ void export_scaling_method() .value("SINC", mapnik::SCALING_SINC) .value("LANCZOS", mapnik::SCALING_LANCZOS) .value("BLACKMAN", mapnik::SCALING_BLACKMAN) + .finalize() ; } diff --git a/src/mapnik_shield_symbolizer.cpp b/src/mapnik_shield_symbolizer.cpp new file mode 100644 index 000000000..3df40828b --- /dev/null +++ b/src/mapnik_shield_symbolizer.cpp @@ -0,0 +1,69 @@ 
+/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" +//pybind11 +#include + +namespace py = pybind11; + +void export_shield_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::shield_symbolizer; + + py::class_(m, "ShieldSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__", hash_impl_2) + .def_property("file", + &get_property, + &set_path_property, + "Shield image file path or mapnik.PathExpression") + .def_property("shield_dx", + &get_property, + &set_double_property, + "shield_dx displacement") + .def_property("shield_dy", + &get_property, + &set_double_property, + "shield_dy displacement") + .def_property("image_transform", + &get_transform, + &set_transform, + "Shield image transform") + .def_property("unlock_image", + &get_property, + &set_boolean_property, + "Unlock shield image") + .def_property("offset", + &get_property, + &set_double_property, + "Shield offset") + ; + +} diff --git a/src/mapnik_style.cpp b/src/mapnik_style.cpp index da0f47bfd..779c98687 100644 --- a/src/mapnik_style.cpp +++ b/src/mapnik_style.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,31 +20,27 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - // mapnik -#include +#include +#include #include -#include "mapnik_enumeration.hpp" #include #include // generate_image_filters +//pybind11 +#include +#include +#include +#include + +namespace py = pybind11; using mapnik::feature_type_style; +using mapnik::filter_mode_enum; using mapnik::rules; using mapnik::rule; +PYBIND11_MAKE_OPAQUE(rules); + std::string get_image_filters(feature_type_style & style) { std::string filters_str; @@ -61,59 +57,54 @@ void set_image_filters(feature_type_style & style, std::string const& filters) { throw mapnik::value_error("failed to parse image-filters: '" + filters + "'"); } -#ifdef _WINDOWS - style.image_filters() 
= new_filters; - // FIXME : https://svn.boost.org/trac/boost/ticket/2839 -#else style.image_filters() = std::move(new_filters); -#endif } -void export_style() +py::object get_filter_mode(feature_type_style const& style) { - using namespace boost::python; + return py::cast(filter_mode_enum(style.get_filter_mode())); +} - mapnik::enumeration_("filter_mode") - .value("ALL",mapnik::FILTER_ALL) - .value("FIRST",mapnik::FILTER_FIRST) - ; +void set_filter_mode(feature_type_style& style, filter_mode_enum mode) +{ + style.set_filter_mode(mapnik::filter_mode_e(mode)); +} - class_("Rules",init<>("default ctor")) - .def(vector_indexing_suite()) +void export_style(py::module const& m) +{ + py::native_enum(m, "filter_mode", "enum.Enum") + .value("ALL",mapnik::filter_mode_enum::FILTER_ALL) + .value("FIRST",mapnik::filter_mode_enum::FILTER_FIRST) + .finalize() ; - class_("Style",init<>("default style constructor")) - .add_property("rules",make_function - (&feature_type_style::get_rules, - return_value_policy()), - "List of rules belonging to a style as rule objects.\n" - "\n" - "Usage:\n" - ">>> for r in m.find_style('style 1').rules:\n" - ">>> print r\n" - "\n" - "\n" - ) - .add_property("filter_mode", - &feature_type_style::get_filter_mode, - &feature_type_style::set_filter_mode, + py::bind_vector(m, "Rules", py::module_local()); + + py::class_(m, "Style") + .def(py::init<>(), "default style constructor") + .def_property_readonly("rules", + &feature_type_style::get_rules, + "Rules assigned to this style.\n") + .def_property("filter_mode", + &get_filter_mode, + &set_filter_mode, "Set/get the filter mode of the style") - .add_property("opacity", + .def_property("opacity", &feature_type_style::get_opacity, &feature_type_style::set_opacity, "Set/get the opacity of the style") - .add_property("comp_op", + .def_property("comp_op", &feature_type_style::comp_op, &feature_type_style::set_comp_op, "Set/get the comp-op (composite operation) of the style") - .add_property("image_filters_inflate", + .def_property("image_filters_inflate", &feature_type_style::image_filters_inflate, &feature_type_style::image_filters_inflate, "Set/get the image_filters_inflate property of the style") - .add_property("image_filters", + .def_property("image_filters", get_image_filters, set_image_filters, - "Set/get the comp-op (composite operation) of the style") + "Set/get image filters for the style") ; } diff --git a/src/mapnik_svg_generator_grammar.cpp b/src/mapnik_svg_generator_grammar.cpp deleted file mode 100644 index 5c02b6e4a..000000000 --- a/src/mapnik_svg_generator_grammar.cpp +++ /dev/null @@ -1,27 +0,0 @@ -/***************************************************************************** - * - * This file is part of Mapnik (c++ mapping toolkit) - * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon - * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
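The Rule and Style bindings above keep the familiar python-mapnik pattern of appending to the bound `Symbolizers`/`Rules` vectors. A hedged sketch, assuming `mapnik.Expression` is bound as before and the appended collections behave as in previous releases:

```python
import mapnik

rule = mapnik.Rule("water", 0, 50000)                 # name, min/max scale denominators
rule.filter = mapnik.Expression("[type] = 'water'")
rule.symbolizers.append(mapnik.RasterSymbolizer())    # bound std::vector exposes append()

style = mapnik.Style()
style.rules.append(rule)                              # 'Rules' is an opaque, bound vector
style.filter_mode = mapnik.filter_mode.FIRST          # new enum.Enum-backed value
style.opacity = 0.8
style.image_filters = "blur"                          # parsed via parse_image_filters
```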
- * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * - *****************************************************************************/ - -#include -#include - -using sink_type = std::back_insert_iterator; -template struct mapnik::svg::svg_path_generator; diff --git a/src/mapnik_symbolizer.cpp b/src/mapnik_symbolizer.cpp index 419549467..7031121ba 100644 --- a/src/mapnik_symbolizer.cpp +++ b/src/mapnik_symbolizer.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,404 +20,368 @@ * *****************************************************************************/ -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include #include #include +#include #include #include #include -#include "mapnik_enumeration.hpp" -#include "mapnik_svg.hpp" #include -#include +#include +#include #include // for known_svg_prefix_ #include #include #include #include +#include +#include -// stl -#include +#include "mapnik_symbolizer.hpp" + +//pybind11 +#include +#include +#include +#include + +namespace py = pybind11; using mapnik::symbolizer; +using mapnik::building_symbolizer; +using mapnik::debug_symbolizer; +using mapnik::dot_symbolizer; +using mapnik::group_symbolizer; using mapnik::point_symbolizer; using mapnik::line_symbolizer; using mapnik::line_pattern_symbolizer; +using mapnik::markers_symbolizer; using mapnik::polygon_symbolizer; using mapnik::polygon_pattern_symbolizer; using mapnik::raster_symbolizer; using mapnik::shield_symbolizer; using mapnik::text_symbolizer; -using mapnik::building_symbolizer; -using mapnik::markers_symbolizer; -using mapnik::debug_symbolizer; -using mapnik::group_symbolizer; using mapnik::symbolizer_base; -using mapnik::color; -using mapnik::path_processor_type; -using mapnik::path_expression_ptr; -using mapnik::guess_type; -using mapnik::expression_ptr; -using mapnik::parse_path; +using namespace python_mapnik; namespace { -using namespace boost::python; -void __setitem__(mapnik::symbolizer_base & sym, std::string const& name, mapnik::symbolizer_base::value_type const& val) -{ - put(sym, mapnik::get_key(name), val); -} -std::shared_ptr numeric_wrapper(const object& arg) -{ - std::shared_ptr result; - if (PyBool_Check(arg.ptr())) - { - mapnik::value_bool val = extract(arg); - result.reset(new mapnik::symbolizer_base::value_type(val)); - } - else if (PyFloat_Check(arg.ptr())) - { - mapnik::value_double val = extract(arg); - result.reset(new mapnik::symbolizer_base::value_type(val)); - } - else - { - mapnik::value_integer val = extract(arg); - result.reset(new mapnik::symbolizer_base::value_type(val)); - } - return result; -} - -struct extract_python_object +struct extract_underlying_type_visitor { - using result_type = boost::python::object; - template - auto operator() (T const& val) const -> result_type + py::object operator() (T const& sym) const { - 
return result_type(val); // wrap into python object + return py::cast(sym); } }; -boost::python::object __getitem__(mapnik::symbolizer_base const& sym, std::string const& name) +inline py::object extract_underlying_type(symbolizer const& sym) { - using const_iterator = symbolizer_base::cont_type::const_iterator; - mapnik::keys key = mapnik::get_key(name); - const_iterator itr = sym.properties.find(key); - if (itr != sym.properties.end()) - { - return mapnik::util::apply_visitor(extract_python_object(), itr->second); - } - //mapnik::property_meta_type const& meta = mapnik::get_meta(key); - //return mapnik::util::apply_visitor(extract_python_object(), std::get<1>(meta)); - return boost::python::object(); + return mapnik::util::apply_visitor(extract_underlying_type_visitor(), sym); } -/* std::string __str__(mapnik::symbolizer const& sym) { return mapnik::util::apply_visitor(mapnik::symbolizer_to_json(), sym); } -*/ - -std::string get_symbolizer_type(symbolizer const& sym) -{ - return mapnik::symbolizer_name(sym); // FIXME - do we need this ? -} -std::size_t hash_impl(symbolizer const& sym) +std::string symbolizer_type_name(symbolizer const& sym) { - return mapnik::util::apply_visitor(mapnik::symbolizer_hash_visitor(), sym); + return mapnik::symbolizer_name(sym); } -template -std::size_t hash_impl_2(T const& sym) +struct symbolizer_keys_visitor { - return mapnik::symbolizer_hash::value(sym); -} + symbolizer_keys_visitor(py::list & keys) + : keys_(keys) {} -struct extract_underlying_type_visitor -{ - template - boost::python::object operator() (T const& sym) const + template + void operator() (Symbolizer const& sym) const { - return boost::python::object(sym); + for (auto const& kv : sym.properties) + { + std::string name = std::get<0>(mapnik::get_meta(kv.first)); + keys_.append(name); + } } + py::list & keys_; }; -boost::python::object extract_underlying_type(symbolizer const& sym) -{ - return mapnik::util::apply_visitor(extract_underlying_type_visitor(), sym); -} - -} - -void export_symbolizer() -{ - using namespace boost::python; - - //implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible(); - implicitly_convertible, mapnik::symbolizer_base::value_type>(); - - enum_("keys") - .value("gamma", mapnik::keys::gamma) - .value("gamma_method",mapnik::keys::gamma_method) - ; - - class_("Symbolizer",no_init) - .def("type",get_symbolizer_type) - .def("__hash__",hash_impl) - .def("extract", extract_underlying_type) - ; - - class_("NumericWrapper") - .def("__init__", make_constructor(numeric_wrapper)) - ; - - class_("SymbolizerBase",no_init) - .def("__setitem__",&__setitem__) - .def("__setattr__",&__setitem__) - .def("__getitem__",&__getitem__) - .def("__getattr__",&__getitem__) - //.def("__str__", &__str__) - .def(self == self) // __eq__ - ; -} - - -void export_shield_symbolizer() +struct symbolizer_getitem_visitor { - using namespace boost::python; - class_< shield_symbolizer, bases >("ShieldSymbolizer", - init<>("Default ctor")) - .def("__hash__",hash_impl_2) - ; - -} - -void export_polygon_symbolizer() -{ - using namespace boost::python; - - class_ >("PolygonSymbolizer", - init<>("Default ctor")) - .def("__hash__",hash_impl_2) - ; - -} - -void export_polygon_pattern_symbolizer() -{ - using namespace boost::python; - - mapnik::enumeration_("pattern_alignment") - .value("LOCAL",mapnik::LOCAL_ALIGNMENT) - .value("GLOBAL",mapnik::GLOBAL_ALIGNMENT) - ; + using 
const_iterator = symbolizer_base::cont_type::const_iterator; + symbolizer_getitem_visitor(std::string const& name) + : name_(name) {} - class_("PolygonPatternSymbolizer", - init<>("Default ctor")) - .def("__hash__",hash_impl_2) - ; -} + template + py::object operator() (Symbolizer const& sym) const + { + for (auto const& kv : sym.properties) + { + std::string name = std::get<0>(mapnik::get_meta(kv.first)); + if (name == name_) + { + return mapnik::util::apply_visitor(extract_python_object<>(kv.first), std::get<1>(kv)); + } + } + throw pybind11::key_error("Invalid property name"); + } + std::string const& name_; +}; -void export_raster_symbolizer() +py::object symbolizer_keys(mapnik::symbolizer const& sym) { - using namespace boost::python; - - class_ >("RasterSymbolizer", - init<>("Default ctor")) - ; + py::list keys; + mapnik::util::apply_visitor(symbolizer_keys_visitor(keys), sym); + return keys; } -void export_point_symbolizer() +py::object getitem_impl(mapnik::symbolizer const& sym, std::string const& name) { - using namespace boost::python; - - mapnik::enumeration_("point_placement") - .value("CENTROID",mapnik::CENTROID_POINT_PLACEMENT) - .value("INTERIOR",mapnik::INTERIOR_POINT_PLACEMENT) - ; - - class_ >("PointSymbolizer", - init<>("Default Point Symbolizer - 4x4 black square")) - .def("__hash__",hash_impl_2) - ; + return mapnik::util::apply_visitor(symbolizer_getitem_visitor(name), sym); } -void export_markers_symbolizer() +py::object symbolizer_base_keys(mapnik::symbolizer_base const& sym) { - using namespace boost::python; - - mapnik::enumeration_("marker_placement") - .value("POINT_PLACEMENT",mapnik::MARKER_POINT_PLACEMENT) - .value("INTERIOR_PLACEMENT",mapnik::MARKER_INTERIOR_PLACEMENT) - .value("LINE_PLACEMENT",mapnik::MARKER_LINE_PLACEMENT) - ; - - mapnik::enumeration_("marker_multi_policy") - .value("EACH",mapnik::MARKER_EACH_MULTI) - .value("WHOLE",mapnik::MARKER_WHOLE_MULTI) - .value("LARGEST",mapnik::MARKER_LARGEST_MULTI) - ; - - class_ >("MarkersSymbolizer", - init<>("Default Markers Symbolizer - circle")) - .def("__hash__",hash_impl_2) - ; + py::list keys; + for (auto const& kv : sym.properties) + { + std::string name = std::get<0>(mapnik::get_meta(kv.first)); + keys.append(name); + } + return keys; } +} // namespace -void export_line_symbolizer() +void export_symbolizer(py::module const& m) { - using namespace boost::python; - - mapnik::enumeration_("line_rasterizer") - .value("FULL",mapnik::RASTERIZER_FULL) - .value("FAST",mapnik::RASTERIZER_FAST) + py::implicitly_convertible(); + py::class_(m, "Symbolizer") + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + .def(py::init()) + + .def("type_name", symbolizer_type_name) + .def("__hash__", hash_impl) + .def("__getitem__",&getitem_impl) + .def("__getattr__",&getitem_impl) + .def("keys", &symbolizer_keys) + .def("extract", &extract_underlying_type) + .def("__str__", &__str__) + .def("__repr__", &__str__) + .def("to_json", &__str__) ; - mapnik::enumeration_("stroke_linecap", - "The possible values for a line cap used when drawing\n" - "with a stroke.\n") - .value("BUTT_CAP",mapnik::BUTT_CAP) - .value("SQUARE_CAP",mapnik::SQUARE_CAP) - .value("ROUND_CAP",mapnik::ROUND_CAP) + py::class_(m, "SymbolizerBase") + //.def("__getitem__",&__getitem__) + //.def("__getattr__",&__getitem__) + .def("keys", &symbolizer_base_keys) + .def(py::self == py::self) // 
__eq__ + .def_property("smooth", + &get_property, + &set_double_property, + "Smoothing value") + .def_property("simplify_tolerance", + &get_property, + &set_double_property, + "Simplify tolerance") + .def_property("clip", + &get_property, + &set_boolean_property, + "Clip - False/True") + .def_property("comp_op", + &get, + &set_enum_property, + "Composite mode (comp-op)") + .def_property("geometry_transform", + &get_transform, + &set_transform, + "Geometry transform") ; - mapnik::enumeration_("stroke_linejoin", - "The possible values for the line joining mode\n" - "when drawing with a stroke.\n") - .value("MITER_JOIN",mapnik::MITER_JOIN) - .value("MITER_REVERT_JOIN",mapnik::MITER_REVERT_JOIN) - .value("ROUND_JOIN",mapnik::ROUND_JOIN) - .value("BEVEL_JOIN",mapnik::BEVEL_JOIN) - ; - - - class_ >("LineSymbolizer", - init<>("Default LineSymbolizer - 1px solid black")) - .def("__hash__",hash_impl_2) - ; + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); + py::implicitly_convertible(); } -void export_line_pattern_symbolizer() -{ - using namespace boost::python; - - class_ >("LinePatternSymbolizer", - init<> ("Default LinePatternSymbolizer")) - .def("__hash__",hash_impl_2) - ; -} - -void export_debug_symbolizer() -{ - using namespace boost::python; - - mapnik::enumeration_("debug_symbolizer_mode") - .value("COLLISION",mapnik::DEBUG_SYM_MODE_COLLISION) - .value("VERTEX",mapnik::DEBUG_SYM_MODE_VERTEX) - ; - class_ >("DebugSymbolizer", - init<>("Default debug Symbolizer")) - .def("__hash__",hash_impl_2) - ; -} - -void export_building_symbolizer() -{ - using namespace boost::python; - - class_ >("BuildingSymbolizer", - init<>("Default BuildingSymbolizer")) - .def("__hash__",hash_impl_2) - ; - -} - -namespace { - -void group_symbolizer_properties_set_layout_simple(mapnik::group_symbolizer_properties &p, - mapnik::simple_row_layout &s) -{ - p.set_layout(s); -} - -void group_symbolizer_properties_set_layout_pair(mapnik::group_symbolizer_properties &p, - mapnik::pair_layout &s) -{ - p.set_layout(s); -} - -std::shared_ptr group_rule_construct1(mapnik::expression_ptr p) -{ - return std::make_shared(p, mapnik::expression_ptr()); -} - -} // anonymous namespace - -void export_group_symbolizer() -{ - using namespace boost::python; - using mapnik::group_rule; - using mapnik::simple_row_layout; - using mapnik::pair_layout; - using mapnik::group_symbolizer_properties; - - class_ >("GroupRule", - init()) - .def("__init__", boost::python::make_constructor(group_rule_construct1)) - .def("append", &group_rule::append) - .def("set_filter", &group_rule::set_filter) - .def("set_repeat_key", &group_rule::set_repeat_key) - ; - - class_("SimpleRowLayout") - .def("item_margin", &simple_row_layout::get_item_margin) - .def("set_item_margin", &simple_row_layout::set_item_margin) - ; - - class_("PairLayout") - .def("item_margin", &simple_row_layout::get_item_margin) - .def("set_item_margin", &simple_row_layout::set_item_margin) - .def("max_difference", &pair_layout::get_max_difference) - .def("set_max_difference", &pair_layout::set_max_difference) - ; - - class_ >("GroupSymbolizerProperties") - .def("add_rule", &group_symbolizer_properties::add_rule) - .def("set_layout", 
&group_symbolizer_properties_set_layout_simple) - .def("set_layout", &group_symbolizer_properties_set_layout_pair) - ; - - class_ >("GroupSymbolizer", - init<>("Default GroupSymbolizer")) - .def("__hash__",hash_impl_2) - ; - -} +// void export_shield_symbolizer() +// { +// using namespace boost::python; +// class_< shield_symbolizer, bases >("ShieldSymbolizer", +// init<>("Default ctor")) +// .def("__hash__",hash_impl_2) +// ; + +// } + + +// void export_polygon_pattern_symbolizer() +// { +// using namespace boost::python; + +// mapnik::enumeration_("pattern_alignment") +// .value("LOCAL",mapnik::pattern_alignment_enum::LOCAL_ALIGNMENT) +// .value("GLOBAL",mapnik::pattern_alignment_enum::GLOBAL_ALIGNMENT) +// ; + +// class_("PolygonPatternSymbolizer", +// init<>("Default ctor")) +// .def("__hash__",hash_impl_2) +// ; +// } + +// void export_raster_symbolizer() +// { +// using namespace boost::python; + +// class_ >("RasterSymbolizer", +// init<>("Default ctor")) +// ; +// } + +// void export_markers_symbolizer() +// { +// using namespace boost::python; + +// mapnik::enumeration_("marker_placement") +// .value("POINT_PLACEMENT",mapnik::marker_placement_enum::MARKER_POINT_PLACEMENT) +// .value("INTERIOR_PLACEMENT",mapnik::marker_placement_enum::MARKER_INTERIOR_PLACEMENT) +// .value("LINE_PLACEMENT",mapnik::marker_placement_enum::MARKER_LINE_PLACEMENT) +// ; + +// mapnik::enumeration_("marker_multi_policy") +// .value("EACH",mapnik::marker_multi_policy_enum::MARKER_EACH_MULTI) +// .value("WHOLE",mapnik::marker_multi_policy_enum::MARKER_WHOLE_MULTI) +// .value("LARGEST",mapnik::marker_multi_policy_enum::MARKER_LARGEST_MULTI) +// ; + +// class_ >("MarkersSymbolizer", +// init<>("Default Markers Symbolizer - circle")) +// .def("__hash__",hash_impl_2) +// ; +// } + +// void export_line_pattern_symbolizer() +// { +// using namespace boost::python; + +// class_ >("LinePatternSymbolizer", +// init<> ("Default LinePatternSymbolizer")) +// .def("__hash__",hash_impl_2) +// ; +// } + +// void export_debug_symbolizer() +// { +// using namespace boost::python; + +// mapnik::enumeration_("debug_symbolizer_mode") +// .value("COLLISION",mapnik::debug_symbolizer_mode_enum::DEBUG_SYM_MODE_COLLISION) +// .value("VERTEX",mapnik::debug_symbolizer_mode_enum::DEBUG_SYM_MODE_VERTEX) +// ; + +// class_ >("DebugSymbolizer", +// init<>("Default debug Symbolizer")) +// .def("__hash__",hash_impl_2) +// ; +// } + +// void export_building_symbolizer() +// { +// using namespace boost::python; + +// class_ >("BuildingSymbolizer", +// init<>("Default BuildingSymbolizer")) +// .def("__hash__",hash_impl_2) +// ; + +// } + +// namespace { + +// void group_symbolizer_properties_set_layout_simple(mapnik::group_symbolizer_properties &p, +// mapnik::simple_row_layout &s) +// { +// p.set_layout(s); +// } + +// void group_symbolizer_properties_set_layout_pair(mapnik::group_symbolizer_properties &p, +// mapnik::pair_layout &s) +// { +// p.set_layout(s); +// } + +// std::shared_ptr group_rule_construct1(mapnik::expression_ptr p) +// { +// return std::make_shared(p, mapnik::expression_ptr()); +// } + +// } // anonymous namespace + +// void export_group_symbolizer() +// { +// using namespace boost::python; +// using mapnik::group_rule; +// using mapnik::simple_row_layout; +// using mapnik::pair_layout; +// using mapnik::group_symbolizer_properties; + +// class_ >("GroupRule", +// init()) +// .def("__init__", boost::python::make_constructor(group_rule_construct1)) +// .def("append", &group_rule::append) +// .def("set_filter", 
&group_rule::set_filter) +// .def("set_repeat_key", &group_rule::set_repeat_key) +// ; + +// class_("SimpleRowLayout") +// .def("item_margin", &simple_row_layout::get_item_margin) +// .def("set_item_margin", &simple_row_layout::set_item_margin) +// ; + +// class_("PairLayout") +// .def("item_margin", &simple_row_layout::get_item_margin) +// .def("set_item_margin", &simple_row_layout::set_item_margin) +// .def("max_difference", &pair_layout::get_max_difference) +// .def("set_max_difference", &pair_layout::set_max_difference) +// ; + +// class_ >("GroupSymbolizerProperties") +// .def("add_rule", &group_symbolizer_properties::add_rule) +// .def("set_layout", &group_symbolizer_properties_set_layout_simple) +// .def("set_layout", &group_symbolizer_properties_set_layout_pair) +// ; + +// class_ >("GroupSymbolizer", +// init<>("Default GroupSymbolizer")) +// .def("__hash__",hash_impl_2) +// ; + +// } diff --git a/src/mapnik_symbolizer.hpp b/src/mapnik_symbolizer.hpp new file mode 100644 index 000000000..629dc655e --- /dev/null +++ b/src/mapnik_symbolizer.hpp @@ -0,0 +1,297 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +#ifndef MAPNIK_SYMBOLIZER_INCLUDED +#define MAPNIK_SYMBOLIZER_INCLUDED + +#include +#include +#include +#include +#include +#include +#include +#include +#include +//pybind11 +#include + +PYBIND11_MAKE_OPAQUE(mapnik::symbolizer); +PYBIND11_MAKE_OPAQUE(mapnik::path_expression); + +namespace py = pybind11; + +namespace python_mapnik { + +using mapnik::symbolizer; +using mapnik::symbolizer_base; +using mapnik::parse_path; +using mapnik::path_processor; + + +template +struct enum_converter +{ + static auto apply(mapnik::enumeration_wrapper const& wrapper, mapnik::keys key) -> py::object + { + return py::cast(TargetType(wrapper.value)); + } +}; + +template <> +struct enum_converter +{ + static auto apply(mapnik::enumeration_wrapper const& wrapper, mapnik::keys key) -> py::object + { + auto meta = mapnik::get_meta(key); + auto const& convert_fun_ptr(std::get<1>(meta)); + if (convert_fun_ptr) + { + return py::cast(convert_fun_ptr(wrapper)); + } + throw pybind11::key_error("Invalid property name"); + } +}; + +template +struct extract_python_object +{ + using result_type = py::object; + mapnik::keys key_; + extract_python_object(mapnik::keys key) + : key_(key) {} + + auto operator() (mapnik::value_bool val) const -> result_type + { + return py::bool_(val); + } + + auto operator() (mapnik::value_double val) const -> result_type + { + return py::float_(val); + } + + auto operator() (mapnik::value_integer val) const -> result_type + { + return py::int_(val); + } + + auto operator() (mapnik::color const& col) const -> result_type + { + return py::cast(col); + } + + auto operator() (mapnik::expression_ptr const& expr) const -> result_type + { + return py::cast(expr); + } + + auto operator() (mapnik::path_expression_ptr const& expr) const ->result_type + { + return py::cast(expr); + } + + auto operator() (mapnik::enumeration_wrapper const& wrapper) const ->result_type + { + return enum_converter::apply(wrapper, key_); + } + + auto operator() (mapnik::transform_list_ptr const& expr) const ->result_type + { + if (expr) return py::cast(mapnik::transform_processor_type::to_string(*expr)); + return py::none(); + } + + auto operator() (mapnik::raster_colorizer_ptr const& colorizer) const ->result_type + { + return py::cast(colorizer); + } + + template + auto operator() (T const& val) const -> result_type + { + std::cerr << "Can't convert to Python object [" << typeid(val).name() << "]" << std::endl; + return py::none(); + } +}; + +template +py::object get_property(Symbolizer const& sym) +{ + using const_iterator = symbolizer_base::cont_type::const_iterator; + const_iterator itr = sym.properties.find(Key); + if (itr != sym.properties.end()) + { + return mapnik::util::apply_visitor(extract_python_object(Key), itr->second); + } + //throw pybind11::key_error("Invalid property name"); + return py::none(); +} + +template +void set_color_property(Symbolizer & sym, py::object const& obj) +{ + if (py::isinstance(obj)) + { + mapnik::put(sym, Key, obj.cast()); + } + else if (py::isinstance(obj)) + { + auto expr = obj.cast(); + mapnik::put(sym, Key, expr); + } + else if (py::isinstance(obj)) + { + mapnik::put(sym, Key, mapnik::color(obj.cast())); + } + else throw pybind11::value_error(); +} + +template +void 
set_boolean_property(Symbolizer & sym, py::object const& obj) +{ + + if (py::isinstance(obj)) + { + mapnik::put(sym, Key, obj.cast()); + } + else if (py::isinstance(obj)) + { + auto expr = obj.cast(); + mapnik::put(sym, Key, expr); + } + else throw pybind11::value_error(); +} + +template +void set_integer_property(Symbolizer & sym, py::object const& obj) +{ + + if (py::isinstance(obj)) + { + mapnik::put(sym, Key, obj.cast()); + } + else if (py::isinstance(obj)) + { + auto expr = obj.cast(); + mapnik::put(sym, Key, expr); + } + else throw pybind11::value_error(); +} + +template +void set_double_property(Symbolizer & sym, py::object const& obj) +{ + + if (py::isinstance(obj) || py::isinstance(obj)) + { + mapnik::put(sym, Key, obj.cast()); + } + else if (py::isinstance(obj)) + { + auto expr = obj.cast(); + mapnik::put(sym, Key, expr); + } + else throw pybind11::value_error(); +} + +template +void set_enum_property(Symbolizer & sym, py::object const& obj) +{ + if (py::isinstance(obj)) + { + mapnik::put(sym, Key, obj.cast()); + } + else if (py::isinstance(obj)) + { + auto expr = obj.cast(); + mapnik::put(sym, Key, expr); + } + else throw pybind11::value_error(); +} + +template +void set_path_property(Symbolizer & sym, py::object const& obj) +{ + if (py::isinstance(obj)) + { + mapnik::put(sym, Key, parse_path(obj.cast())); + } + else if (py::isinstance(obj)) + { + auto expr = obj.cast(); + mapnik::put(sym, Key, expr); + } + else throw pybind11::value_error(); +} + +template +void set_colorizer_property(Symbolizer & sym, py::object const& obj) +{ + if (py::isinstance(obj)) + { + mapnik::put(sym, Key, obj.cast()); + } + else throw pybind11::value_error(); +} + +inline std::size_t hash_impl(symbolizer const& sym) +{ + return mapnik::util::apply_visitor(mapnik::symbolizer_hash_visitor(), sym); +} + +template +std::size_t hash_impl_2(T const& sym) +{ + return mapnik::symbolizer_hash::value(sym); +} + +template +auto get(symbolizer_base const& sym) -> Value +{ + return mapnik::get(sym, Key); +} + +template +void set(symbolizer_base & sym, Value const& val) +{ + mapnik::put(sym, Key, val); +} + +template +std::string get_transform(symbolizer_base const& sym) +{ + auto expr = mapnik::get(sym, Key); + if (expr) + return mapnik::transform_processor_type::to_string(*expr); + return ""; +} + +template +void set_transform(symbolizer_base & sym, std::string const& str) +{ + mapnik::put(sym, Key, mapnik::parse_transform(str)); +} + +} // namespace python_mapnik + +#endif //MAPNIK_SYMBOLIZER_INCLUDED diff --git a/src/mapnik_text_placement.cpp b/src/mapnik_text_placement.cpp deleted file mode 100644 index 7bab210bd..000000000 --- a/src/mapnik_text_placement.cpp +++ /dev/null @@ -1,588 +0,0 @@ -/***************************************************************************** - * - * This file is part of Mapnik (c++ mapping toolkit) - * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon - * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
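Taken together, the generic `Symbolizer` bindings and the property helpers in the new `mapnik_symbolizer.hpp` above are meant to support introspection along the lines of the following sketch; property names follow the bindings shown, and the stripped template parameters are assumed to be the matching `mapnik::keys` values:

```python
import mapnik

sym = mapnik.RasterSymbolizer()
sym.opacity = 0.5            # set_double_property also accepts a mapnik.Expression

# wrap in the generic Symbolizer for introspection
generic = mapnik.Symbolizer(sym)
print(generic.type_name())   # e.g. 'RasterSymbolizer'
print(generic.keys())        # names of the properties currently set
print(generic['opacity'])    # or generic.opacity, via __getattr__
print(generic.to_json())     # symbolizer_to_json output

# extract() recovers the concrete symbolizer type again
concrete = generic.extract() # a RasterSymbolizer instance, not the generic wrapper
```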
- * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * - *****************************************************************************/ - -#include - -// boost -#include "boost_std_shared_shim.hpp" -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#include -#include -#pragma GCC diagnostic pop - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "mapnik_enumeration.hpp" -#include "mapnik_threads.hpp" - -using namespace mapnik; - -/* Notes: - Overriding functions in inherited classes: - boost.python documentation doesn't really tell you how to do it. - But this helps: - http://www.gamedev.net/topic/446225-inheritance-in-boostpython/ - - register_ptr_to_python is required for wrapped classes, but not for unwrapped. - - Functions don't have to be members of the class, but can also be - normal functions taking a ref to the class as first parameter. -*/ - -namespace { - -using namespace boost::python; - -// This class works around a feature in boost python. -// See http://osdir.com/ml/python.c++/2003-11/msg00158.html - -template -class class_with_converter : public boost::python::class_ -{ -public: - using self = class_with_converter; - // Construct with the class name, with or without docstring, and default __init__() function - class_with_converter(char const* name, char const* doc = 0) : boost::python::class_(name, doc) { } - - // Construct with class name, no docstring, and an uncallable __init__ function - class_with_converter(char const* name, boost::python::no_init_t y) : boost::python::class_(name, y) { } - - // Construct with class name, docstring, and an uncallable __init__ function - class_with_converter(char const* name, char const* doc, boost::python::no_init_t y) : boost::python::class_(name, doc, y) { } - - // Construct with class name and init<> function - template class_with_converter(char const* name, boost::python::init_base const& i) - : boost::python::class_(name, i) { } - - // Construct with class name, docstring and init<> function - template - inline class_with_converter(char const* name, char const* doc, boost::python::init_base const& i) - : boost::python::class_(name, doc, i) { } - - template - self& def_readwrite_convert(char const* name, D const& d, char const* /*doc*/=0) - { - this->add_property(name, - boost::python::make_getter(d, boost::python::return_value_policy()), - boost::python::make_setter(d, boost::python::default_call_policies())); - return *this; - } -}; - -/* -boost::python::tuple get_displacement(text_layout_properties const& t) -{ - return boost::python::make_tuple(0.0,0.0);// FIXME t.displacement.x, t.displacement.y); -} - -void set_displacement(text_layout_properties &t, boost::python::tuple arg) -{ - if (len(arg) != 2) - { - PyErr_SetObject(PyExc_ValueError, - ("expected 2-item tuple in call to set_displacement; got %s" - % arg).ptr() - ); - throw_error_already_set(); - } - - //double x = extract(arg[0]); - //double y = extract(arg[1]); - //t.displacement.set(x, y); FIXME -} - - -struct NodeWrap - : formatting::node, wrapper -{ - NodeWrap() - : formatting::node(), wrapper() {} - - void apply(evaluated_format_properties_ptr 
p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - python_block_auto_unblock b; - this->get_override("apply")(ptr(&p), ptr(&feature), ptr(&vars), ptr(&output)); - } - - virtual void add_expressions(expression_set &output) const - { - override o = this->get_override("add_expressions"); - if (o) - { - python_block_auto_unblock b; - o(ptr(&output)); - } else - { - formatting::node::add_expressions(output); - } - } - - void default_add_expressions(expression_set &output) const - { - formatting::node::add_expressions(output); - } -}; -*/ -/* -struct TextNodeWrap - : formatting::text_node, wrapper -{ - TextNodeWrap(expression_ptr expr) - : formatting::text_node(expr), wrapper() {} - - TextNodeWrap(std::string expr_text) - : formatting::text_node(expr_text), wrapper() {} - - virtual void apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - if(override o = this->get_override("apply")) - { - python_block_auto_unblock b; - o(ptr(&p), ptr(&feature), ptr(&vars), ptr(&output)); - } - else - { - formatting::text_node::apply(p, feature, vars, output); - } - } - - void default_apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - formatting::text_node::apply(p, feature, vars, output); - } -}; -*/ -/* -struct FormatNodeWrap - : formatting::format_node, wrapper -{ - virtual void apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - if(override o = this->get_override("apply")) - { - python_block_auto_unblock b; - o(ptr(&p), ptr(&feature), ptr(&vars), ptr(&output)); - } - else - { - formatting::format_node::apply(p, feature, vars ,output); - } - } - - void default_apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - formatting::format_node::apply(p, feature, vars, output); - } -}; - -struct ExprFormatWrap: formatting::expression_format, wrapper -{ - virtual void apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - if(override o = this->get_override("apply")) - { - python_block_auto_unblock b; - o(ptr(&p), ptr(&feature), ptr(&vars), ptr(&output)); - } - else - { - formatting::expression_format::apply(p, feature, vars, output); - } - } - - void default_apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - formatting::expression_format::apply(p, feature, vars, output); - } -}; - -struct LayoutNodeWrap: formatting::layout_node, wrapper -{ - virtual void apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - if(override o = this->get_override("apply")) - { - python_block_auto_unblock b; - o(ptr(&p), ptr(&feature), ptr(&vars), ptr(&output)); - } - else - { - formatting::layout_node::apply(p, feature, vars, output); - } - } - - void default_apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - formatting::layout_node::apply(p, feature, vars, output); - } -}; - -struct ListNodeWrap: formatting::list_node, wrapper -{ - //Default constructor - ListNodeWrap() : formatting::list_node(), wrapper() - { - } - - //Special constructor: Takes a python sequence as its argument - ListNodeWrap(object l) : 
formatting::list_node(), wrapper() - { - stl_input_iterator begin(l), end; - while (begin != end) - { - children_.push_back(*begin); - ++begin; - } - } - - // TODO: Add constructor taking variable number of arguments. - http://wiki.python.org/moin/boost.python/HowTo#A.22Raw.22_function - - virtual void apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - if(override o = this->get_override("apply")) - { - python_block_auto_unblock b; - o(ptr(&p), ptr(&feature), ptr(&vars), ptr(&output)); - } - else - { - formatting::list_node::apply(p, feature, vars, output); - } - } - - void default_apply(evaluated_format_properties_ptr p, feature_impl const& feature, attributes const& vars, text_layout &output) const - { - formatting::list_node::apply(p, feature, vars, output); - } - - inline void IndexError(){ - PyErr_SetString(PyExc_IndexError, "Index out of range"); - throw_error_already_set(); - } - - unsigned get_length() - { - return children_.size(); - } - - formatting::node_ptr get_item(int i) - { - if (i < 0) i+= children_.size(); - if (i < static_cast(children_.size())) return children_[i]; - IndexError(); - return formatting::node_ptr(); //Avoid compiler warning - } - - void set_item(int i, formatting::node_ptr ptr) - { - if (i < 0) i+= children_.size(); - if (i < static_cast(children_.size())) children_[i] = ptr; - IndexError(); - } - - void append(formatting::node_ptr ptr) - { - children_.push_back(ptr); - } -}; -*/ -/* -struct TextPlacementsWrap: text_placements, wrapper -{ - text_placement_info_ptr get_placement_info(double scale_factor_) const - { - python_block_auto_unblock b; - //return this->get_override("get_placement_info")(); - return text_placement_info_ptr(); - } -}; - -struct TextPlacementInfoWrap: text_placement_info, wrapper -{ - TextPlacementInfoWrap(text_placements const* parent, - double scale_factor_) - : text_placement_info(parent, scale_factor_) - { - - } - - bool next() - { - python_block_auto_unblock b; - return this->get_override("next")(); - } -}; - -void insert_expression(expression_set *set, expression_ptr p) -{ - set->insert(p); -} - - -evaluated_format_properties_ptr get_format(text_symbolizer const& sym) -{ - return sym.get_placement_options()->defaults.format; -} - -void set_format(text_symbolizer const& sym, evaluated_format_properties_ptr format) -{ - sym.get_placement_options()->defaults.format = format; -} - -text_symbolizer_properties & get_properties(text_symbolizer const& sym) -{ - return sym.get_placement_options()->defaults; -} - -void set_properties(text_symbolizer const& sym, text_symbolizer_properties & defaults) -{ - sym.get_placement_options()->defaults = defaults; -} -*/ -} - -void export_text_placement() -{ - /* - using namespace boost::python; - - enumeration_("label_placement") - .value("LINE_PLACEMENT",LINE_PLACEMENT) - .value("POINT_PLACEMENT",POINT_PLACEMENT) - .value("VERTEX_PLACEMENT",VERTEX_PLACEMENT) - .value("INTERIOR_PLACEMENT",INTERIOR_PLACEMENT) - ; - enumeration_("vertical_alignment") - .value("TOP",V_TOP) - .value("MIDDLE",V_MIDDLE) - .value("BOTTOM",V_BOTTOM) - .value("AUTO",V_AUTO) - ; - - enumeration_("horizontal_alignment") - .value("LEFT",H_LEFT) - .value("MIDDLE",H_MIDDLE) - .value("RIGHT",H_RIGHT) - .value("AUTO",H_AUTO) - ; - - enumeration_("justify_alignment") - .value("LEFT",J_LEFT) - .value("MIDDLE",J_MIDDLE) - .value("RIGHT",J_RIGHT) - .value("AUTO", J_AUTO) - ; - - enumeration_("text_transform") - .value("NONE",NONE) - 
.value("UPPERCASE",UPPERCASE) - .value("LOWERCASE",LOWERCASE) - .value("CAPITALIZE",CAPITALIZE) - ; - - enumeration_("halo_rasterizer") - .value("FULL",HALO_RASTERIZER_FULL) - .value("FAST",HALO_RASTERIZER_FAST) - ; - */ - class_("TextSymbolizer", - init<>()) - ; - /* - - class_with_converter - ("TextSymbolizerProperties") - .def_readwrite_convert("label_placement", &text_symbolizer_properties::label_placement) - .def_readwrite_convert("upright", &text_symbolizer_properties::upright) - .def_readwrite("label_spacing", &text_symbolizer_properties::label_spacing) - .def_readwrite("label_position_tolerance", &text_symbolizer_properties::label_position_tolerance) - .def_readwrite("avoid_edges", &text_symbolizer_properties::avoid_edges) - .def_readwrite("margin", &text_symbolizer_properties::margin) - .def_readwrite("repeat_distance", &text_symbolizer_properties::repeat_distance) - .def_readwrite("minimum_distance", &text_symbolizer_properties::minimum_distance) - .def_readwrite("minimum_padding", &text_symbolizer_properties::minimum_padding) - .def_readwrite("minimum_path_length", &text_symbolizer_properties::minimum_path_length) - .def_readwrite("maximum_angle_char_delta", &text_symbolizer_properties::max_char_angle_delta) - .def_readwrite("allow_overlap", &text_symbolizer_properties::allow_overlap) - .def_readwrite("largest_bbox_only", &text_symbolizer_properties::largest_bbox_only) - .def_readwrite("layout_defaults", &text_symbolizer_properties::layout_defaults) - //.def_readwrite("format", &text_symbolizer_properties::format) - .add_property ("format_tree", - &text_symbolizer_properties::format_tree, - &text_symbolizer_properties::set_format_tree); - //from_xml, to_xml operate on mapnik's internal XML tree and don't make sense in python. - // add_expressions isn't useful in python either. The result is only needed by - // attribute_collector (which isn't exposed in python) and - // it just calls add_expressions of the associated formatting tree. - // set_old_style expression is just a compatibility wrapper and doesn't need to be exposed in python. 
- ; - - class_with_converter - ("TextLayoutProperties") - .def_readwrite_convert("horizontal_alignment", &text_layout_properties::halign) - .def_readwrite_convert("justify_alignment", &text_layout_properties::jalign) - .def_readwrite_convert("vertical_alignment", &text_layout_properties::valign) - .def_readwrite("text_ratio", &text_layout_properties::text_ratio) - .def_readwrite("wrap_width", &text_layout_properties::wrap_width) - .def_readwrite("wrap_before", &text_layout_properties::wrap_before) - .def_readwrite("orientation", &text_layout_properties::orientation) - .def_readwrite("rotate_displacement", &text_layout_properties::rotate_displacement) - .add_property("displacement", &get_displacement, &set_displacement); - - class_with_converter - ("CharProperties") - .def_readwrite_convert("text_transform", &detail::evaluated_format_properties::text_transform) - .def_readwrite_convert("fontset", &detail::evaluated_format_properties::fontset) - .def(init()) //Copy constructor - .def_readwrite("face_name", &detail::evaluated_format_properties::face_name) - .def_readwrite("text_size", &detail::evaluated_format_properties::text_size) - .def_readwrite("character_spacing", &detail::evaluated_format_properties::character_spacing) - .def_readwrite("line_spacing", &detail::evaluated_format_properties::line_spacing) - .def_readwrite("text_opacity", &detail::evaluated_format_properties::text_opacity) - .def_readwrite("fill", &detail::evaluated_format_properties::fill) - .def_readwrite("halo_fill", &detail::evaluated_format_properties::halo_fill) - .def_readwrite("halo_radius", &evaluated_format_properties::halo_radius) - //from_xml, to_xml operate on mapnik's internal XML tree and don't make sense in python. - ; - class_, - boost::noncopyable> - ("TextPlacements") - .def_readwrite("defaults", &text_placements::defaults) - //.def("get_placement_info", pure_virtual(&text_placements::get_placement_info)) - // TODO: add_expressions() - ; - register_ptr_to_python >(); - - class_, - boost::noncopyable> - ("TextPlacementInfo", - init()) - .def("next", pure_virtual(&text_placement_info::next)) - .def_readwrite("properties", &text_placement_info::properties) - .def_readwrite("scale_factor", &text_placement_info::scale_factor) - ; - register_ptr_to_python >(); - - - class_, - boost::noncopyable>("ExpressionSet") - .def("insert", &insert_expression); - ; - - class_, - boost::noncopyable>("FormattingNode") - .def("apply", pure_virtual(&formatting::node::apply)) - .def("add_expressions", pure_virtual(&formatting::node::add_expressions)) - .def("to_xml", pure_virtual(&formatting::node::to_xml)) - ; - - register_ptr_to_python >(); - - class_, - bases,boost::noncopyable>("FormattingText", init()) - .def(init()) - .def("apply", &formatting::text_node::apply)//, &TextNodeWrap::default_apply) - .add_property("text",&formatting::text_node::get_text, &formatting::text_node::set_text) - ; - - register_ptr_to_python >(); - - class_with_converter, - bases, - boost::noncopyable> - ("FormattingFormat") - .def_readwrite_convert("text_size", &formatting::format_node::text_size) - .def_readwrite_convert("face_name", &formatting::format_node::face_name) - .def_readwrite_convert("character_spacing", &formatting::format_node::character_spacing) - .def_readwrite_convert("line_spacing", &formatting::format_node::line_spacing) - .def_readwrite_convert("text_opacity", &formatting::format_node::text_opacity) - .def_readwrite_convert("text_transform", &formatting::format_node::text_transform) - .def_readwrite_convert("fill", 
&formatting::format_node::fill) - .def_readwrite_convert("halo_fill", &formatting::format_node::halo_fill) - .def_readwrite_convert("halo_radius", &formatting::format_node::halo_radius) - .def("apply", &formatting::format_node::apply, &FormatNodeWrap::default_apply) - .add_property("child", - &formatting::format_node::get_child, - &formatting::format_node::set_child) - ; - register_ptr_to_python >(); - - class_, - bases, - boost::noncopyable> - ("FormattingList", init<>()) - .def(init()) - .def("append", &formatting::list_node::push_back) - .def("apply", &formatting::list_node::apply, &ListNodeWrap::default_apply) - .def("__len__", &ListNodeWrap::get_length) - .def("__getitem__", &ListNodeWrap::get_item) - .def("__setitem__", &ListNodeWrap::set_item) - .def("append", &ListNodeWrap::append) - ; - - register_ptr_to_python >(); - - class_, - bases, - boost::noncopyable> - ("FormattingExpressionFormat") - .def_readwrite("text_size", &formatting::expression_format::text_size) - .def_readwrite("face_name", &formatting::expression_format::face_name) - .def_readwrite("character_spacing", &formatting::expression_format::character_spacing) - .def_readwrite("line_spacing", &formatting::expression_format::line_spacing) - .def_readwrite("text_opacity", &formatting::expression_format::text_opacity) - .def_readwrite("fill", &formatting::expression_format::fill) - .def_readwrite("halo_fill", &formatting::expression_format::halo_fill) - .def_readwrite("halo_radius", &formatting::expression_format::halo_radius) - .def("apply", &formatting::expression_format::apply, &ExprFormatWrap::default_apply) - .add_property("child", - &formatting::expression_format::get_child, - &formatting::expression_format::set_child) - ; - register_ptr_to_python >(); -*/ - //TODO: registry -} diff --git a/src/mapnik_text_symbolizer.cpp b/src/mapnik_text_symbolizer.cpp new file mode 100644 index 000000000..59651ac76 --- /dev/null +++ b/src/mapnik_text_symbolizer.cpp @@ -0,0 +1,116 @@ +/***************************************************************************** + * + * This file is part of Mapnik (c++ mapping toolkit) + * + * Copyright (C) 2024 Artem Pavlenko + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + *****************************************************************************/ + +// mapnik +#include + +#include +#include +#include +#include +#include +#include +#include "mapnik_symbolizer.hpp" + +//pybind11 +#include +#include +#include +#include +#include + +namespace py = pybind11; + +namespace { + +//text symbolizer +mapnik::text_placements_ptr get_placement_finder(mapnik::text_symbolizer const& sym) +{ + return mapnik::get(sym, mapnik::keys::text_placements_); +} + +void set_placement_finder(mapnik::text_symbolizer & sym, std::shared_ptr const& finder) +{ + mapnik::put(sym, mapnik::keys::text_placements_, finder); +} + +} + +void export_text_symbolizer(py::module const& m) +{ + using namespace python_mapnik; + using mapnik::text_symbolizer; + + py::native_enum(m, "LabelPlacement", "enum.Enum") + .value("LINE_PLACEMENT", mapnik::label_placement_enum::LINE_PLACEMENT) + .value("POINT_PLACEMENT", mapnik::label_placement_enum::POINT_PLACEMENT) + .value("VERTEX_PLACEMENT", mapnik::label_placement_enum::VERTEX_PLACEMENT) + .value("INTERIOR_PLACEMENT", mapnik::label_placement_enum::INTERIOR_PLACEMENT) + .finalize() + ; + +// mapnik::enumeration_("vertical_alignment") +// .value("TOP", mapnik::vertical_alignment_enum::V_TOP) +// .value("MIDDLE", mapnik::vertical_alignment_enum::V_MIDDLE) +// .value("BOTTOM", mapnik::vertical_alignment_enum::V_BOTTOM) +// .value("AUTO", mapnik::vertical_alignment_enum::V_AUTO); + +// mapnik::enumeration_("horizontal_alignment") +// .value("LEFT", mapnik::horizontal_alignment_enum::H_LEFT) +// .value("MIDDLE", mapnik::horizontal_alignment_enum::H_MIDDLE) +// .value("RIGHT", mapnik::horizontal_alignment_enum::H_RIGHT) +// .value("AUTO", mapnik::horizontal_alignment_enum::H_AUTO); + +// mapnik::enumeration_("justify_alignment") +// .value("LEFT", mapnik::justify_alignment_enum::J_LEFT) +// .value("MIDDLE", mapnik::justify_alignment_enum::J_MIDDLE) +// .value("RIGHT", mapnik::justify_alignment_enum::J_RIGHT) +// .value("AUTO", mapnik::justify_alignment_enum::J_AUTO); + +// mapnik::enumeration_("text_transform") +// .value("NONE", mapnik::text_transform_enum::NONE) +// .value("UPPERCASE", mapnik::text_transform_enum::UPPERCASE) +// .value("LOWERCASE", mapnik::text_transform_enum::LOWERCASE) +// .value("CAPITALIZE", mapnik::text_transform_enum::CAPITALIZE); + + py::native_enum(m, "halo_rasterizer", "enum.Enum") + .value("FULL", mapnik::halo_rasterizer_enum::HALO_RASTERIZER_FULL) + .value("FAST", mapnik::halo_rasterizer_enum::HALO_RASTERIZER_FAST) + .finalize(); + + + // set_symbolizer_property(sym, keys::halo_comp_op, node); + // set_symbolizer_property(sym, keys::halo_rasterizer, node); + // set_symbolizer_property(sym, keys::halo_transform, node); + // set_symbolizer_property(sym, keys::offset, node); + + py::class_(m, "TextSymbolizer") + .def(py::init<>(), "Default ctor") + .def("__hash__",hash_impl_2) + .def_property("placement_finder", &get_placement_finder, &set_placement_finder, "Placement finder") + .def_property("halo_comp_op", + &get, + &set_enum_property, + "Composite mode (comp-op)") + ; + +} diff --git a/src/mapnik_threads.hpp b/src/mapnik_threads.hpp deleted file mode 100644 index 25b55871d..000000000 --- a/src/mapnik_threads.hpp +++ /dev/null @@ -1,109 +0,0 @@ 
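For orientation while reviewing the new pybind11 bindings in `src/mapnik_text_symbolizer.cpp` above, here is a minimal, illustrative sketch of how that surface is expected to look from Python once the extension builds. It is not part of the patch: it assumes the module imports as `mapnik`, and it sticks to names registered above (`TextSymbolizer`, `LabelPlacement`, `halo_rasterizer`) plus `Rule.symbolizers` as used in the updated tests.

```python
import enum
import mapnik  # assumes the extension built from the bindings above imports as "mapnik"

# TextSymbolizer currently exposes a default constructor, __hash__, and the
# placement_finder / halo_comp_op properties registered above.
sym = mapnik.TextSymbolizer()

# LabelPlacement and halo_rasterizer are exported via py::native_enum with an
# enum.Enum base, so they are expected to behave like ordinary Python enums.
assert issubclass(mapnik.LabelPlacement, enum.Enum)
placement = mapnik.LabelPlacement.POINT_PLACEMENT
halo = mapnik.halo_rasterizer.FULL
assert placement.name == "POINT_PLACEMENT"

# Symbolizers attach to rules the same way the updated tests do.
r = mapnik.Rule()
r.symbolizers.append(sym)
```

The vertical/horizontal/justify alignment and text_transform enumerations remain commented out in the new file, so they are not shown here; only the pieces that have been re-exported so far are used.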
-/***************************************************************************** - * - * This file is part of Mapnik (c++ mapping toolkit) - * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon - * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * - *****************************************************************************/ -#ifndef MAPNIK_THREADS_HPP -#define MAPNIK_THREADS_HPP - -#include -#include - -namespace mapnik { -class python_thread -{ - /* Docs: - http://docs.python.org/c-api/init.html#thread-state-and-the-global-interpreter-lock - */ -public: - static void unblock() - { -#ifdef MAPNIK_DEBUG - if (state.get()) - { - std::cerr << "ERROR: Python threads are already unblocked. " - "Unblocking again will loose the current state and " - "might crash later. Aborting!\n"; - abort(); //This is a serious error and can't be handled in any other sane way - } -#endif - PyThreadState *_save = 0; //Name defined by python - Py_UNBLOCK_THREADS; - state.reset(_save); -#ifdef MAPNIK_DEBUG - if (!_save) { - thread_support = false; - } -#endif - } - - static void block() - { -#ifdef MAPNIK_DEBUG - if (thread_support && !state.get()) - { - std::cerr << "ERROR: Trying to restore python thread state, " - "but no state is saved. Can't continue and also " - "can't raise an exception because the python " - "interpreter might be non-function. 
Aborting!\n"; - abort(); - } -#endif - PyThreadState *_save = state.release(); //Name defined by python - Py_BLOCK_THREADS; - } - -private: - static boost::thread_specific_ptr state; -#ifdef MAPNIK_DEBUG - static bool thread_support; -#endif -}; - -class python_block_auto_unblock -{ -public: - python_block_auto_unblock() - { - python_thread::block(); - } - - ~python_block_auto_unblock() - { - python_thread::unblock(); - } -}; - -class python_unblock_auto_block -{ -public: - python_unblock_auto_block() - { - python_thread::unblock(); - } - - ~python_unblock_auto_block() - { - python_thread::block(); - } -}; - -} //namespace - -#endif // MAPNIK_THREADS_HPP diff --git a/src/mapnik_value_converter.hpp b/src/mapnik_value_converter.hpp index dbb9e879e..4a04094ae 100644 --- a/src/mapnik_value_converter.hpp +++ b/src/mapnik_value_converter.hpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -19,72 +19,186 @@ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * *****************************************************************************/ + #ifndef MAPNIK_PYTHON_BINDING_VALUE_CONVERTER_INCLUDED #define MAPNIK_PYTHON_BINDING_VALUE_CONVERTER_INCLUDED // mapnik #include -#include -// boost -#include -#include +#include +#include +//pybind11 +#include -namespace boost { namespace python { +namespace { - struct value_converter +struct value_converter +{ + PyObject * operator() (mapnik::value_integer val) const { - PyObject * operator() (mapnik::value_integer val) const - { - return ::PyLong_FromLongLong(val); - } + return ::PyLong_FromLongLong(val); + } + + PyObject * operator() (mapnik::value_double val) const + { + return ::PyFloat_FromDouble(val); + } + + PyObject * operator() (mapnik::value_bool val) const + { + return ::PyBool_FromLong(val); + } + + PyObject * operator() (std::string const& s) const + { + return ::PyUnicode_DecodeUTF8(s.c_str(), static_cast(s.length()),0); + } + + PyObject * operator() (mapnik::value_unicode_string const& s) const + { + const char* data = reinterpret_cast(s.getBuffer()); + Py_ssize_t size = static_cast(s.length() * sizeof(s[0])); + return ::PyUnicode_DecodeUTF16(data, size, nullptr, nullptr); + } + + PyObject * operator() (mapnik::value_null const& /*s*/) const + { + Py_RETURN_NONE; + } +}; + +} // namespace + +struct mapnik_value_to_python +{ + static PyObject* convert(mapnik::value const& v) + { + return mapnik::util::apply_visitor(value_converter(),v); + } +}; + +struct mapnik_param_to_python +{ + static PyObject* convert(mapnik::value_holder const& v) + { + return mapnik::util::apply_visitor(value_converter(),v); + } +}; + + + +namespace PYBIND11_NAMESPACE { namespace detail { - PyObject * operator() (mapnik::value_double val) const +template <> +struct type_caster +{ + mapnik::transcoder const tr_{"utf8"}; +public: + + PYBIND11_TYPE_CASTER(mapnik::value, const_name("Value")); + + bool load(handle src, bool) + { + PyObject *source = src.ptr(); + if (PyUnicode_Check(source)) { - return ::PyFloat_FromDouble(val); + PyObject* tmp = PyUnicode_AsUTF8String(source); + if (!tmp) return false; + char* c_str = PyBytes_AsString(tmp); + value = tr_.transcode(c_str); + Py_DecRef(tmp); + return !PyErr_Occurred(); } - - PyObject * operator() (mapnik::value_bool val) const + else if (PyBool_Check(source)) { - return 
::PyBool_FromLong(val); + value = (source == Py_True) ? true : false; + return !PyErr_Occurred(); } - - PyObject * operator() (std::string const& s) const + else if (PyFloat_Check(source)) { - return ::PyUnicode_DecodeUTF8(s.c_str(),implicit_cast(s.length()),0); + PyObject *tmp = PyNumber_Float(source); + if (!tmp) return false; + value = PyFloat_AsDouble(tmp); + Py_DecRef(tmp); + return !PyErr_Occurred(); } - - PyObject * operator() (mapnik::value_unicode_string const& s) const + else if(PyLong_Check(source)) { - std::string buffer; - mapnik::to_utf8(s,buffer); - return ::PyUnicode_DecodeUTF8(buffer.c_str(),implicit_cast(buffer.length()),0); + PyObject *tmp = PyNumber_Long(source); + if (!tmp) return false; + value = PyLong_AsLong(tmp); + Py_DecRef(tmp); + return !PyErr_Occurred(); } - - PyObject * operator() (mapnik::value_null const& /*s*/) const + else if (source == Py_None) { - Py_RETURN_NONE; + value = mapnik::value_null{}; + return true; } - }; - + return false; + } - struct mapnik_value_to_python + static handle cast(mapnik::value src, return_value_policy /*policy*/, handle /*parent*/) { - static PyObject* convert(mapnik::value const& v) - { - return mapnik::util::apply_visitor(value_converter(),v); - } + return mapnik_value_to_python::convert(src); + } +}; + +template <> +struct type_caster +{ +public: - }; + PYBIND11_TYPE_CASTER(mapnik::value_holder, const_name("ValueHolder")); - struct mapnik_param_to_python + bool load(handle src, bool) { - static PyObject* convert(mapnik::value_holder const& v) + PyObject *source = src.ptr(); + if (PyUnicode_Check(source)) { - return mapnik::util::apply_visitor(value_converter(),v); + PyObject* tmp = PyUnicode_AsUTF8String(source); + if (!tmp) return false; + char* c_str = PyBytes_AsString(tmp); + value = std::string(c_str); + Py_DecRef(tmp); + return !PyErr_Occurred(); } - }; + else if (PyBool_Check(source)) + { + value = (source == Py_True) ? 
true : false; + return !PyErr_Occurred(); + } + else if (PyFloat_Check(source)) + { + PyObject *tmp = PyNumber_Float(source); + if (!tmp) return false; + value = PyFloat_AsDouble(tmp); + Py_DecRef(tmp); + return !PyErr_Occurred(); + } + else if(PyLong_Check(source)) + { + PyObject *tmp = PyNumber_Long(source); + if (!tmp) return false; + value = static_cast(PyLong_AsLong(tmp)); + Py_DecRef(tmp); + return !PyErr_Occurred(); + } + else if (source == Py_None) + { + value = mapnik::value_null{}; + return true; + } + return false; + } + + static handle cast(mapnik::value_holder src, return_value_policy /*policy*/, handle /*parent*/) + { + return mapnik_param_to_python::convert(src); + } +}; +}} // namespace PYBIND11_NAMESPACE::detail -}} #endif // MAPNIK_PYTHON_BINDING_VALUE_CONVERTER_INCLUDED diff --git a/src/mapnik_view_transform.cpp b/src/mapnik_view_transform.cpp index 8dc177c09..6cd3057d5 100644 --- a/src/mapnik_view_transform.cpp +++ b/src/mapnik_view_transform.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -22,13 +22,8 @@ #include -// boost #pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - +#include #include #pragma GCC diagnostic pop diff --git a/src/python_grid_utils.cpp b/src/python_grid_utils.cpp index 62dba2bb9..6ac426cd1 100644 --- a/src/python_grid_utils.cpp +++ b/src/python_grid_utils.cpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -21,43 +21,33 @@ *****************************************************************************/ #if defined(GRID_RENDERER) - -#include - -// boost -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" - -#include -#pragma GCC diagnostic pop - // mapnik +#include #include #include #include #include #include -#include #include -#include +#include #include #include #include "python_grid_utils.hpp" - +#include "mapnik_value_converter.hpp" // stl #include +#include namespace mapnik { + template void grid2utf(T const& grid_type, - boost::python::list& l, - std::vector& key_order) + py::list& l, + std::vector& key_order) { + using code_point_t = std::uint32_t; using keys_type = std::map< typename T::lookup_type, typename T::value_type>; using keys_iterator = typename keys_type::iterator; @@ -69,13 +59,13 @@ void grid2utf(T const& grid_type, // start counting at utf8 codepoint 32, aka space character std::uint16_t codepoint = 32; - unsigned array_size = data.width(); - for (unsigned y = 0; y < data.height(); ++y) + std::size_t array_size = data.width(); + for (std::size_t y = 0; y < data.height(); ++y) { std::uint16_t idx = 0; - const std::unique_ptr line(new Py_UNICODE[array_size]); + const std::unique_ptr line(new code_point_t[array_size]); typename T::value_type const* row = data.get_row(y); - for 
(unsigned x = 0; x < data.width(); ++x) + for (std::size_t x = 0; x < data.width(); ++x) { typename T::value_type feature_id = row[x]; feature_pos = feature_keys.find(feature_id); @@ -99,29 +89,28 @@ void grid2utf(T const& grid_type, keys[val] = codepoint; key_order.push_back(val); } - line[idx++] = static_cast(codepoint); + line[idx++] = static_cast(codepoint); ++codepoint; } else { - line[idx++] = static_cast(key_pos->second); + line[idx++] = static_cast(key_pos->second); } } // else, shouldn't get here... } - l.append(boost::python::object( - boost::python::handle<>( - PyUnicode_FromUnicode(line.get(), array_size)))); + l.append(PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, line.get(), array_size)); } } template void grid2utf(T const& grid_type, - boost::python::list& l, + py::list& l, std::vector& key_order, unsigned int resolution) { + using code_point_t = std::uint32_t; using keys_type = std::map< typename T::lookup_type, typename T::value_type>; using keys_iterator = typename keys_type::iterator; @@ -136,7 +125,7 @@ void grid2utf(T const& grid_type, for (unsigned y = 0; y < grid_type.height(); y=y+resolution) { std::uint16_t idx = 0; - const std::unique_ptr line(new Py_UNICODE[array_size]); + const std::unique_ptr line(new code_point_t[array_size]); mapnik::grid::value_type const* row = grid_type.get_row(y); for (unsigned x = 0; x < grid_type.width(); x=x+resolution) { @@ -162,86 +151,24 @@ void grid2utf(T const& grid_type, keys[val] = codepoint; key_order.push_back(val); } - line[idx++] = static_cast(codepoint); - ++codepoint; - } - else - { - line[idx++] = static_cast(key_pos->second); - } - } - // else, shouldn't get here... - } - l.append(boost::python::object( - boost::python::handle<>( - PyUnicode_FromUnicode(line.get(), array_size)))); - } -} - - -template -void grid2utf2(T const& grid_type, - boost::python::list& l, - std::vector& key_order, - unsigned int resolution) -{ - using keys_type = std::map< typename T::lookup_type, typename T::value_type>; - using keys_iterator = typename keys_type::iterator; - - typename T::data_type const& data = grid_type.data(); - typename T::feature_key_type const& feature_keys = grid_type.get_feature_keys(); - typename T::feature_key_type::const_iterator feature_pos; - - keys_type keys; - // start counting at utf8 codepoint 32, aka space character - uint16_t codepoint = 32; - - mapnik::grid::data_type target(data.width()/resolution,data.height()/resolution); - mapnik::scale_grid(target,grid_type.data(),0.0,0.0); - - unsigned array_size = target.width(); - for (unsigned y = 0; y < target.height(); ++y) - { - uint16_t idx = 0; - const std::unique_ptr line(new Py_UNICODE[array_size]); - mapnik::grid::value_type * row = target.get_row(y); - unsigned x; - for (x = 0; x < target.width(); ++x) - { - feature_pos = feature_keys.find(row[x]); - if (feature_pos != feature_keys.end()) - { - mapnik::grid::lookup_type val = feature_pos->second; - keys_iterator key_pos = keys.find(val); - if (key_pos == keys.end()) - { - // Create a new entry for this key. Skip the codepoints that - // can't be encoded directly in JSON. - if (codepoint == 34) ++codepoint; // Skip " - else if (codepoint == 92) ++codepoint; // Skip backslash - keys[val] = codepoint; - key_order.push_back(val); - line[idx++] = static_cast(codepoint); + line[idx++] = static_cast(codepoint); ++codepoint; } else { - line[idx++] = static_cast(key_pos->second); + line[idx++] = static_cast(key_pos->second); } } // else, shouldn't get here... 
} - l.append(boost::python::object( - boost::python::handle<>( - PyUnicode_FromUnicode(line.get(), array_size)))); + l.append(PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, line.get(), array_size)); } } - template void write_features(T const& grid_type, - boost::python::dict& feature_data, - std::vector const& key_order) + py::dict& feature_data, + std::vector const& key_order) { typename T::feature_type const& g_features = grid_type.get_grid_features(); if (g_features.size() <= 0) @@ -265,7 +192,7 @@ void write_features(T const& grid_type, } bool found = false; - boost::python::dict feat; + py::dict feat; mapnik::feature_ptr feature = feat_itr->second; for ( std::string const& attr : attributes ) { @@ -282,27 +209,24 @@ void write_features(T const& grid_type, if (found) { - feature_data[feat_itr->first] = feat; + feature_data[feat_itr->first.c_str()] = feat; } } } template void grid_encode_utf(T const& grid_type, - boost::python::dict & json, - bool add_features, - unsigned int resolution) + py::dict & json, + bool add_features, + unsigned int resolution) { // convert buffer to utf and gather key order - boost::python::list l; + py::list l; std::vector key_order; - if (resolution != 1) { - // resample on the fly - faster, less accurate + if (resolution != 1) + { mapnik::grid2utf(grid_type,l,key_order,resolution); - - // resample first - slower, more accurate - //mapnik::grid2utf2(grid_type,l,key_order,resolution); } else { @@ -310,14 +234,14 @@ void grid_encode_utf(T const& grid_type, } // convert key order to proper python list - boost::python::list keys_a; + py::list keys_a; for ( typename T::lookup_type const& key_id : key_order ) { keys_a.append(key_id); } // gather feature data - boost::python::dict feature_data; + py::dict feature_data; if (add_features) { mapnik::write_features(grid_type,feature_data,key_order); } @@ -329,10 +253,10 @@ void grid_encode_utf(T const& grid_type, } template -boost::python::dict grid_encode( T const& grid, std::string const& format, bool add_features, unsigned int resolution) +py::dict grid_encode( T const& grid, std::string const& format, bool add_features, unsigned int resolution) { if (format == "utf") { - boost::python::dict json; + py::dict json; grid_encode_utf(grid,json,add_features,resolution); return json; } @@ -344,13 +268,13 @@ boost::python::dict grid_encode( T const& grid, std::string const& format, bool } } -template boost::python::dict grid_encode( mapnik::grid const& grid, std::string const& format, bool add_features, unsigned int resolution); -template boost::python::dict grid_encode( mapnik::grid_view const& grid, std::string const& format, bool add_features, unsigned int resolution); +template py::dict grid_encode( mapnik::grid const& grid, std::string const& format, bool add_features, unsigned int resolution); +template py::dict grid_encode( mapnik::grid_view const& grid, std::string const& format, bool add_features, unsigned int resolution); void render_layer_for_grid(mapnik::Map const& map, mapnik::grid & grid, unsigned layer_idx, - boost::python::list const& fields, + py::list const& fields, double scale_factor, unsigned offset_x, unsigned offset_y) @@ -365,12 +289,12 @@ void render_layer_for_grid(mapnik::Map const& map, } // convert python list to std::set - boost::python::ssize_t num_fields = boost::python::len(fields); - for(boost::python::ssize_t i=0; i name(fields[i]); - if (name.check()) + std::size_t num_fields = py::len(fields); + for(std::size_t i = 0; i < num_fields; ++i) { + py::handle handle = fields[i]; + if 
(py::isinstance(handle)) { - grid.add_field(name()); + grid.add_field(handle.cast()); } else { @@ -401,6 +325,6 @@ void render_layer_for_grid(mapnik::Map const& map, ren.apply(layer,attributes); } -} +} // namespace mapnik #endif diff --git a/src/python_grid_utils.hpp b/src/python_grid_utils.hpp index a15a0264f..84c8bab62 100644 --- a/src/python_grid_utils.hpp +++ b/src/python_grid_utils.hpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -22,54 +22,48 @@ #ifndef MAPNIK_PYTHON_BINDING_GRID_UTILS_INCLUDED #define MAPNIK_PYTHON_BINDING_GRID_UTILS_INCLUDED -// boost -#include - // mapnik #include #include +// pybind11 +#include + +namespace py = pybind11; namespace mapnik { template void grid2utf(T const& grid_type, - boost::python::list& l, + py::list& l, std::vector& key_order); template void grid2utf(T const& grid_type, - boost::python::list& l, + py::list& l, std::vector& key_order, unsigned int resolution); -template -void grid2utf2(T const& grid_type, - boost::python::list& l, - std::vector& key_order, - unsigned int resolution); - - template void write_features(T const& grid_type, - boost::python::dict& feature_data, + py::dict& feature_data, std::vector const& key_order); template void grid_encode_utf(T const& grid_type, - boost::python::dict & json, + py::dict & json, bool add_features, unsigned int resolution); template -boost::python::dict grid_encode( T const& grid, std::string const& format, bool add_features, unsigned int resolution); +py::dict grid_encode( T const& grid, std::string const& format, bool add_features, unsigned int resolution); void render_layer_for_grid(const mapnik::Map& map, mapnik::grid& grid, unsigned layer_idx, // TODO - layer by name or index - boost::python::list const& fields, + py::list const& fields, double scale_factor, unsigned offset_x, unsigned offset_y); diff --git a/src/python_optional.hpp b/src/python_optional.hpp index 45db528ba..1ffaff1bf 100644 --- a/src/python_optional.hpp +++ b/src/python_optional.hpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,179 +20,11 @@ * *****************************************************************************/ -#include -#include +//pybind11 +#include +#include -#include - -// boost::optional to/from converter from John Wiegley - -template -struct object_from_python -{ - object_from_python() { - boost::python::converter::registry::push_back - (&TfromPy::convertible, &TfromPy::construct, - boost::python::type_id()); - } -}; - -template -struct register_python_conversion -{ - register_python_conversion() { - boost::python::to_python_converter(); - object_from_python(); - } -}; - -template -struct python_optional : public mapnik::util::noncopyable -{ - struct optional_to_python - { - static PyObject * convert(const boost::optional& value) - { - return (value ? 
boost::python::to_python_value()(*value) : - boost::python::detail::none()); - } - }; - - struct optional_from_python - { - static void * convertible(PyObject * source) - { - using namespace boost::python::converter; - - if (source == Py_None) - return source; - - const registration& converters(registered::converters); - - if (implicit_rvalue_convertible_from_python(source, - converters)) { - rvalue_from_python_stage1_data data = - rvalue_from_python_stage1(source, converters); - return rvalue_from_python_stage2(source, data, converters); - } - return 0; - } - - static void construct(PyObject * source, - boost::python::converter::rvalue_from_python_stage1_data * data) - { - using namespace boost::python::converter; - - void * const storage = ((rvalue_from_python_storage *) - data)->storage.bytes; - - if (data->convertible == source) // == None - new (storage) boost::optional(); // A Boost uninitialized value - else - new (storage) boost::optional(*static_cast(data->convertible)); - - data->convertible = storage; - } - }; - - explicit python_optional() - { - register_python_conversion, - optional_to_python, optional_from_python>(); - } -}; - -// to/from boost::optional -template <> -struct python_optional : public mapnik::util::noncopyable -{ - struct optional_to_python - { - static PyObject * convert(const boost::optional& value) - { - return (value ? PyFloat_FromDouble(*value) : - boost::python::detail::none()); - } - }; - - struct optional_from_python - { - static void * convertible(PyObject * source) - { - using namespace boost::python::converter; - - if (source == Py_None || PyFloat_Check(source)) - return source; - return 0; - } - - static void construct(PyObject * source, - boost::python::converter::rvalue_from_python_stage1_data * data) - { - using namespace boost::python::converter; - void * const storage = ((rvalue_from_python_storage > *) - data)->storage.bytes; - if (source == Py_None) // == None - new (storage) boost::optional(); // A Boost uninitialized value - else - new (storage) boost::optional(PyFloat_AsDouble(source)); - data->convertible = storage; - } - }; - - explicit python_optional() - { - register_python_conversion, - optional_to_python, optional_from_python>(); - } -}; - -// to/from boost::optional -template <> -struct python_optional : public mapnik::util::noncopyable -{ - struct optional_to_python - { - static PyObject * convert(const boost::optional& value) - { - if (value) - { - if (*value) Py_RETURN_TRUE; - else Py_RETURN_FALSE; - } - else return boost::python::detail::none(); - } - }; - struct optional_from_python - { - static void * convertible(PyObject * source) - { - using namespace boost::python::converter; - - if (source == Py_None || PyBool_Check(source)) - return source; - return 0; - } - - static void construct(PyObject * source, - boost::python::converter::rvalue_from_python_stage1_data * data) - { - using namespace boost::python::converter; - void * const storage = ((rvalue_from_python_storage > *) - data)->storage.bytes; - if (source == Py_None) // == None - new (storage) boost::optional(); // A Boost uninitialized value - else - { - new (storage) boost::optional(source == Py_True ? 
true : false); - } - data->convertible = storage; - } - }; - - explicit python_optional() - { - register_python_conversion, - optional_to_python, optional_from_python>(); - } -}; +namespace PYBIND11_NAMESPACE { namespace detail { + template + struct type_caster> : optional_caster> {}; +}} diff --git a/src/python_to_value.hpp b/src/python_to_value.hpp index 89ac66df3..73702c02d 100644 --- a/src/python_to_value.hpp +++ b/src/python_to_value.hpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public @@ -22,98 +22,45 @@ #ifndef MAPNIK_PYTHON_BINDING_PYTHON_TO_VALUE #define MAPNIK_PYTHON_BINDING_PYTHON_TO_VALUE -// boost -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-parameter" -#pragma GCC diagnostic ignored "-Wunused-local-typedef" -#pragma GCC diagnostic ignored "-Wmissing-field-initializers" -#pragma GCC diagnostic ignored "-Wshadow" -#include -#pragma GCC diagnostic pop - // mapnik #include #include #include +//pybind11 +#include + +namespace py = pybind11; + namespace mapnik { - static mapnik::attributes dict2attr(boost::python::dict const& d) + static mapnik::attributes dict2attr(py::dict const& d) { - using namespace boost::python; mapnik::attributes vars; mapnik::transcoder tr_("utf8"); - boost::python::list keys=d.keys(); - for (int i=0; i < len(keys); ++i) + for (auto item : d) { - std::string key; - object obj_key = keys[i]; - if (PyUnicode_Check(obj_key.ptr())) + std::string key = std::string(py::str(item.first)); + py::handle handle = item.second; + if (py::isinstance(handle)) { - PyObject* temp = PyUnicode_AsUTF8String(obj_key.ptr()); - if (temp) - { - #if PY_VERSION_HEX >= 0x03000000 - char* c_str = PyBytes_AsString(temp); - #else - char* c_str = PyString_AsString(temp); - #endif - key = c_str; - Py_DecRef(temp); - } + vars[key] = tr_.transcode(handle.cast().c_str()); } - else + else if (py::isinstance(handle)) { - key = extract(keys[i]); + vars[key] = handle.cast(); } - object obj = d[key]; - if (PyUnicode_Check(obj.ptr())) - { - PyObject* temp = PyUnicode_AsUTF8String(obj.ptr()); - if (temp) - { - #if PY_VERSION_HEX >= 0x03000000 - char* c_str = PyBytes_AsString(temp); - #else - char* c_str = PyString_AsString(temp); - #endif - vars[key] = tr_.transcode(c_str); - Py_DecRef(temp); - } - continue; - } - - if (PyBool_Check(obj.ptr())) + else if (py::isinstance(handle)) { - extract ex(obj); - if (ex.check()) - { - vars[key] = ex(); - } + vars[key] = handle.cast(); } - else if (PyFloat_Check(obj.ptr())) + else if (py::isinstance(handle)) { - extract ex(obj); - if (ex.check()) - { - vars[key] = ex(); - } + vars[key] = handle.cast(); } else { - extract ex(obj); - if (ex.check()) - { - vars[key] = ex(); - } - else - { - extract ex0(obj); - if (ex0.check()) - { - vars[key] = tr_.transcode(ex0().c_str()); - } - } + vars[key] = tr_.transcode(py::str(handle).cast().c_str()); } } return vars; diff --git a/src/boost_std_shared_shim.hpp b/src/python_variant.hpp similarity index 57% rename from src/boost_std_shared_shim.hpp rename to src/python_variant.hpp index 8b603e57e..75631ff71 100644 --- a/src/boost_std_shared_shim.hpp +++ b/src/python_variant.hpp @@ -2,7 +2,7 @@ * * This file is part of Mapnik (c++ mapping toolkit) * - * Copyright (C) 2015 Artem Pavlenko + * Copyright (C) 2024 Artem Pavlenko * * This library is free software; you can redistribute 
it and/or * modify it under the terms of the GNU Lesser General Public @@ -20,30 +20,21 @@ * *****************************************************************************/ -#ifndef MAPNIK_PYTHON_BOOST_STD_SHARED_SHIM -#define MAPNIK_PYTHON_BOOST_STD_SHARED_SHIM - -// boost -#include -#include - -#if BOOST_VERSION < 105300 || defined BOOST_NO_CXX11_SMART_PTR - -// https://github.com/mapnik/mapnik/issues/2022 -#include - -namespace boost { -template const T* get_pointer(std::shared_ptr const& p) -{ - return p.get(); -} - -template T* get_pointer(std::shared_ptr& p) -{ - return p.get(); -} -} // namespace boost - -#endif - -#endif // MAPNIK_PYTHON_BOOST_STD_SHARED_SHIM +//pybind11 +#include +#include +#include + +namespace PYBIND11_NAMESPACE { namespace detail { +template +struct type_caster> : variant_caster> {}; + +// // Specifies the function used to visit the variant -- `apply_visitor` instead of `visit` +// template <> +// struct visit_helper { +// template +// static auto call(Args &&...args) -> decltype(mapnik::util::apply_visitor(args...)) { +// return mapnik::util::apply_visitor(args...); +// } +// }; +}} // namespace PYBIND11_NAMESPACE::detail diff --git a/test/data b/test/data index 2823c6a0b..41c4ceeb0 160000 --- a/test/data +++ b/test/data @@ -1 +1 @@ -Subproject commit 2823c6a0ba9e642869537d8e5c1a1ca5fd3de18a +Subproject commit 41c4ceeb0be4e5e699cdd50bd808054a826c922b diff --git a/test/data-visual b/test/data-visual index ab51821d5..7dfd4568d 160000 --- a/test/data-visual +++ b/test/data-visual @@ -1 +1 @@ -Subproject commit ab51821d511ca47ca1bdae3f83c691cfc0293824 +Subproject commit 7dfd4568d6181da8be3543c8b7522b596a79b774 diff --git a/test/python_tests/agg_rasterizer_integer_overflow_test.py b/test/python_tests/agg_rasterizer_integer_overflow_test.py index bfd8128d9..857766192 100644 --- a/test/python_tests/agg_rasterizer_integer_overflow_test.py +++ b/test/python_tests/agg_rasterizer_integer_overflow_test.py @@ -1,71 +1,77 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_ -from utilities import run_all -import mapnik import json +import mapnik # geojson box of the world -geojson = { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -17963313.143242701888084, -6300857.11560364998877 ], [ -17963313.143242701888084, 13071343.332991421222687 ], [ 7396658.353099936619401, 13071343.332991421222687 ], [ 7396658.353099936619401, -6300857.11560364998877 ], [ -17963313.143242701888084, -6300857.11560364998877 ] ] ] } } +geojson = {"type": "Feature", + "properties": {}, + "geometry": {"type": "Polygon", + "coordinates": [[[-17963313.143242701888084, + -6300857.11560364998877], + [-17963313.143242701888084, + 13071343.332991421222687], + [7396658.353099936619401, + 13071343.332991421222687], + [7396658.353099936619401, + -6300857.11560364998877], + [-17963313.143242701888084, + -6300857.11560364998877]]]}} def test_that_coordinates_do_not_overflow_and_polygon_is_rendered_memory(): - expected_color = mapnik.Color('white') - projection = '+init=epsg:4326' - ds = mapnik.MemoryDatasource() - context = mapnik.Context() - feat = mapnik.Feature.from_geojson(json.dumps(geojson),context) - ds.add_feature(feat) - s = mapnik.Style() - r = mapnik.Rule() - sym = mapnik.PolygonSymbolizer() - sym.fill = expected_color - r.symbols.append(sym) - s.rules.append(r) - lyr = mapnik.Layer('Layer',projection) - lyr.datasource = ds - lyr.styles.append('style') - m = mapnik.Map(256,256,projection) - m.background_color = 
mapnik.Color('green') - m.append_style('style',s) - m.layers.append(lyr) - # 17/20864/45265.png - m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6197514.253362091,-13657768.213995293,6198125.749588372)) - # works 15/5216/11316.png - #m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372)) - im = mapnik.Image(256,256) - mapnik.render(m,im) - eq_(im.get_pixel(128,128),expected_color.packed()) + expected_color = mapnik.Color('white') + projection = 'epsg:4326' + ds = mapnik.MemoryDatasource() + context = mapnik.Context() + feat = mapnik.Feature.from_geojson(json.dumps(geojson), context) + ds.add_feature(feat) + s = mapnik.Style() + r = mapnik.Rule() + sym = mapnik.PolygonSymbolizer() + sym.fill = expected_color + r.symbolizers.append(sym) + s.rules.append(r) + lyr = mapnik.Layer('Layer', projection) + lyr.datasource = ds + lyr.styles.append('style') + m = mapnik.Map(256, 256, projection) + m.background_color = mapnik.Color('green') + m.append_style('style', s) + m.layers.append(lyr) + # 17/20864/45265.png + m.zoom_to_box(mapnik.Box2d(-13658379.710221574, + 6197514.253362091, -13657768.213995293, 6198125.749588372)) + # works 15/5216/11316.png + # m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372)) + im = mapnik.Image(256, 256) + mapnik.render(m, im) + assert im.get_pixel(128, 128) == expected_color.packed() def test_that_coordinates_do_not_overflow_and_polygon_is_rendered_csv(): - expected_color = mapnik.Color('white') - projection = '+init=epsg:4326' - ds = mapnik.MemoryDatasource() - context = mapnik.Context() - feat = mapnik.Feature.from_geojson(json.dumps(geojson),context) - ds.add_feature(feat) - geojson_string = "geojson\n'%s'" % json.dumps(geojson['geometry']) - ds = mapnik.Datasource(**{'type':'csv','inline':geojson_string}) - s = mapnik.Style() - r = mapnik.Rule() - sym = mapnik.PolygonSymbolizer() - sym.fill = expected_color - r.symbols.append(sym) - s.rules.append(r) - lyr = mapnik.Layer('Layer',projection) - lyr.datasource = ds - lyr.styles.append('style') - m = mapnik.Map(256,256,projection) - m.background_color = mapnik.Color('green') - m.append_style('style',s) - m.layers.append(lyr) - # 17/20864/45265.png - m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6197514.253362091,-13657768.213995293,6198125.749588372)) - # works 15/5216/11316.png - #m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372)) - im = mapnik.Image(256,256) - mapnik.render(m,im) - eq_(im.get_pixel(128,128),expected_color.packed()) - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + expected_color = mapnik.Color('white') + projection = 'epsg:4326' + ds = mapnik.MemoryDatasource() + context = mapnik.Context() + feat = mapnik.Feature.from_geojson(json.dumps(geojson), context) + ds.add_feature(feat) + geojson_string = "geojson\n'%s'" % json.dumps(geojson['geometry']) + ds = mapnik.Datasource(**{'type': 'csv', 'inline': geojson_string}) + s = mapnik.Style() + r = mapnik.Rule() + sym = mapnik.PolygonSymbolizer() + sym.fill = expected_color + r.symbolizers.append(sym) + s.rules.append(r) + lyr = mapnik.Layer('Layer', projection) + lyr.datasource = ds + lyr.styles.append('style') + m = mapnik.Map(256, 256, projection) + m.background_color = mapnik.Color('green') + m.append_style('style', s) + m.layers.append(lyr) + # 17/20864/45265.png + m.zoom_to_box(mapnik.Box2d(-13658379.710221574, + 6197514.253362091, -13657768.213995293, 
6198125.749588372)) + # works 15/5216/11316.png + # m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372)) + im = mapnik.Image(256, 256) + mapnik.render(m, im) + assert im.get_pixel(128, 128) == expected_color.packed() diff --git a/test/python_tests/box2d_test.py b/test/python_tests/box2d_test.py index c44100287..e3a477003 100644 --- a/test/python_tests/box2d_test.py +++ b/test/python_tests/box2d_test.py @@ -1,176 +1,155 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_,assert_true,assert_almost_equal,assert_false -from utilities import run_all import mapnik +import pytest def test_coord_init(): c = mapnik.Coord(100, 100) - - eq_(c.x, 100) - eq_(c.y, 100) + assert c.x == 100 + assert c.y == 100 def test_coord_multiplication(): - c = mapnik.Coord(100, 100) - c *= 2 - - eq_(c.x, 200) - eq_(c.y, 200) + c = mapnik.Coord(100, 100) + c *= 2 + assert c.x == 200 + assert c.y == 200 def test_envelope_init(): - e = mapnik.Box2d(100, 100, 200, 200) - - assert_true(e.contains(100, 100)) - assert_true(e.contains(100, 200)) - assert_true(e.contains(200, 200)) - assert_true(e.contains(200, 100)) - - assert_true(e.contains(e.center())) - - assert_false(e.contains(99.9, 99.9)) - assert_false(e.contains(99.9, 200.1)) - assert_false(e.contains(200.1, 200.1)) - assert_false(e.contains(200.1, 99.9)) - - eq_(e.width(), 100) - eq_(e.height(), 100) - - eq_(e.minx, 100) - eq_(e.miny, 100) - - eq_(e.maxx, 200) - eq_(e.maxy, 200) - - eq_(e[0],100) - eq_(e[1],100) - eq_(e[2],200) - eq_(e[3],200) - eq_(e[0],e[-4]) - eq_(e[1],e[-3]) - eq_(e[2],e[-2]) - eq_(e[3],e[-1]) + e = mapnik.Box2d(100, 100, 200, 200) + assert e.contains(100, 100) + assert e.contains(100, 200) + assert e.contains(200, 200) + assert e.contains(200, 100) + assert e.contains(e.center()) + assert not e.contains(99.9, 99.9) + assert not e.contains(99.9, 200.1) + assert not e.contains(200.1, 200.1) + assert not e.contains(200.1, 99.9) + assert e.width() == 100 + assert e.height() == 100 + assert e.minx == 100 + assert e.miny == 100 + assert e.maxx == 200 + assert e.maxy == 200 + assert e[0] == 100 + assert e[1] == 100 + assert e[2] == 200 + assert e[3] == 200 + assert e[0] == e[-4] + assert e[1] == e[-3] + assert e[2] == e[-2] + assert e[3] == e[-1] + c = e.center() + assert c.x == 150 + assert c.y == 150 - c = e.center() - - eq_(c.x, 150) - eq_(c.y, 150) def test_envelope_static_init(): e = mapnik.Box2d.from_string('100 100 200 200') e2 = mapnik.Box2d.from_string('100,100,200,200') e3 = mapnik.Box2d.from_string('100 , 100 , 200 , 200') - eq_(e,e2) - eq_(e,e3) - - assert_true(e.contains(100, 100)) - assert_true(e.contains(100, 200)) - assert_true(e.contains(200, 200)) - assert_true(e.contains(200, 100)) - - assert_true(e.contains(e.center())) - - assert_false(e.contains(99.9, 99.9)) - assert_false(e.contains(99.9, 200.1)) - assert_false(e.contains(200.1, 200.1)) - assert_false(e.contains(200.1, 99.9)) - - eq_(e.width(), 100) - eq_(e.height(), 100) - eq_(e.minx, 100) - eq_(e.miny, 100) - - eq_(e.maxx, 200) - eq_(e.maxy, 200) - - eq_(e[0],100) - eq_(e[1],100) - eq_(e[2],200) - eq_(e[3],200) - eq_(e[0],e[-4]) - eq_(e[1],e[-3]) - eq_(e[2],e[-2]) - eq_(e[3],e[-1]) + assert e == e2 + assert e == e3 + assert e.contains(100, 100) + assert e.contains(100, 200) + assert e.contains(200, 200) + assert e.contains(200, 100) + + assert e.contains(e.center()) + assert not e.contains(99.9, 99.9) + assert not e.contains(99.9, 200.1) + assert not e.contains(200.1, 200.1) + assert not 
e.contains(200.1, 99.9) + + assert e.width() == 100 + assert e.height() == 100 + assert e.minx == 100 + assert e.miny == 100 + assert e.maxx == 200 + assert e.maxy == 200 + + assert e[0] == 100 + assert e[1] == 100 + assert e[2] == 200 + assert e[3] == 200 + assert e[0] == e[-4] + assert e[1] == e[-3] + assert e[2] == e[-2] + assert e[3] == e[-1] c = e.center() - - eq_(c.x, 150) - eq_(c.y, 150) + assert c.x == 150 + assert c.y == 150 def test_envelope_multiplication(): - # no width then no impact of multiplication - a = mapnik.Box2d(100, 100, 100, 100) - a *= 5 - eq_(a.minx,100) - eq_(a.miny,100) - eq_(a.maxx,100) - eq_(a.maxy,100) - - a = mapnik.Box2d(100.0, 100.0, 100.0, 100.0) - a *= 5 - eq_(a.minx,100) - eq_(a.miny,100) - eq_(a.maxx,100) - eq_(a.maxy,100) - - a = mapnik.Box2d(100.0, 100.0, 100.001, 100.001) - a *= 5 - assert_almost_equal(a.minx, 99.9979, places=3) - assert_almost_equal(a.miny, 99.9979, places=3) - assert_almost_equal(a.maxx, 100.0030, places=3) - assert_almost_equal(a.maxy, 100.0030, places=3) - - e = mapnik.Box2d(100, 100, 200, 200) - e *= 2 - eq_(e.minx,50) - eq_(e.miny,50) - eq_(e.maxx,250) - eq_(e.maxy,250) - - assert_true(e.contains(50, 50)) - assert_true(e.contains(50, 250)) - assert_true(e.contains(250, 250)) - assert_true(e.contains(250, 50)) - - assert_false(e.contains(49.9, 49.9)) - assert_false(e.contains(49.9, 250.1)) - assert_false(e.contains(250.1, 250.1)) - assert_false(e.contains(250.1, 49.9)) - - assert_true(e.contains(e.center())) - - eq_(e.width(), 200) - eq_(e.height(), 200) - - eq_(e.minx, 50) - eq_(e.miny, 50) - - eq_(e.maxx, 250) - eq_(e.maxy, 250) - - c = e.center() + # no width then no impact of multiplication + a = mapnik.Box2d(100, 100, 100, 100) + a *= 5 + assert a.minx == 100 + assert a.miny == 100 + assert a.maxx == 100 + assert a.maxy == 100 + + a = mapnik.Box2d(100.0, 100.0, 100.0, 100.0) + a *= 5 + assert a.minx == 100 + assert a.miny == 100 + assert a.maxx == 100 + assert a.maxy == 100 + + a = mapnik.Box2d(100.0, 100.0, 100.001, 100.001) + a *= 5 + assert a.minx == pytest.approx(99.9979, 1e-3) + assert a.miny == pytest.approx(99.9979, 1e-3) + assert a.maxx == pytest.approx(100.0030,1e-3) + assert a.maxy == pytest.approx(100.0030,1e-3) + + e = mapnik.Box2d(100, 100, 200, 200) + e *= 2 + assert e.minx == 50 + assert e.miny == 50 + assert e.maxx == 250 + assert e.maxy == 250 + + assert e.contains(50, 50) + assert e.contains(50, 250) + assert e.contains(250, 250) + assert e.contains(250, 50) + + assert not e.contains(49.9, 49.9) + assert not e.contains(49.9, 250.1) + assert not e.contains(250.1, 250.1) + assert not e.contains(250.1, 49.9) + + c = e.center() + assert c.x == 150 + assert c.y == 150 + + assert e.contains(c) + + assert e.width() == 200 + assert e.height()== 200 + + assert e.minx == 50 + assert e.miny == 50 + + assert e.maxx == 250 + assert e.maxy == 250 - eq_(c.x, 150) - eq_(c.y, 150) def test_envelope_clipping(): - e1 = mapnik.Box2d(-180,-90,180,90) - e2 = mapnik.Box2d(-120,40,-110,48) - e1.clip(e2) - eq_(e1,e2) - - # madagascar in merc - e1 = mapnik.Box2d(4772116.5490, -2744395.0631, 5765186.4203, -1609458.0673) - e2 = mapnik.Box2d(5124338.3753, -2240522.1727, 5207501.8621, -2130452.8520) - e1.clip(e2) - eq_(e1,e2) - - # nz in lon/lat - e1 = mapnik.Box2d(163.8062, -47.1897, 179.3628, -33.9069) - e2 = mapnik.Box2d(173.7378, -39.6395, 174.4849, -38.9252) - e1.clip(e2) - eq_(e1,e2) - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + e1 = mapnik.Box2d(-180, -90, 180, 90) + e2 = 
mapnik.Box2d(-120, 40, -110, 48) + e1.clip(e2) + assert e1 == e2 + + # madagascar in merc + e1 = mapnik.Box2d(4772116.5490, -2744395.0631, 5765186.4203, -1609458.0673) + e2 = mapnik.Box2d(5124338.3753, -2240522.1727, 5207501.8621, -2130452.8520) + e1.clip(e2) + assert e1 == e2 + +# # nz in lon/lat + e1 = mapnik.Box2d(163.8062, -47.1897, 179.3628, -33.9069) + e2 = mapnik.Box2d(173.7378, -39.6395, 174.4849, -38.9252) + e1.clip(e2) + assert e1 == e2 diff --git a/test/python_tests/buffer_clear_test.py b/test/python_tests/buffer_clear_test.py index b4b3bda84..c72c0e919 100644 --- a/test/python_tests/buffer_clear_test.py +++ b/test/python_tests/buffer_clear_test.py @@ -1,61 +1,52 @@ -import os, mapnik -from nose.tools import eq_ -from utilities import execution_path, run_all - -def setup(): - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) +import os +import mapnik def test_clearing_image_data(): - im = mapnik.Image(256,256) + im = mapnik.Image(256, 256) # make sure it equals itself - bytes = im.tostring() - eq_(im.tostring(),bytes) + bytes = im.to_string() + assert im.to_string() == bytes # set background, then clear im.fill(mapnik.Color('green')) - eq_(im.tostring()!=bytes,True) + assert not im.to_string() == bytes # clear image, should now equal original im.clear() - eq_(im.tostring(),bytes) + assert im.to_string() == bytes def make_map(): ds = mapnik.MemoryDatasource() context = mapnik.Context() context.push('Name') pixel_key = 1 - f = mapnik.Feature(context,pixel_key) + f = mapnik.Feature(context, pixel_key) f['Name'] = str(pixel_key) - f.geometry=mapnik.Geometry.from_wkt('POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))') + f.geometry = mapnik.Geometry.from_wkt( + 'POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))') ds.add_feature(f) s = mapnik.Style() r = mapnik.Rule() symb = mapnik.PolygonSymbolizer() - r.symbols.append(symb) + r.symbolizers.append(symb) s.rules.append(r) lyr = mapnik.Layer('Places') lyr.datasource = ds lyr.styles.append('places_labels') - width,height = 256,256 - m = mapnik.Map(width,height) - m.append_style('places_labels',s) + width, height = 256, 256 + m = mapnik.Map(width, height) + m.append_style('places_labels', s) m.layers.append(lyr) m.zoom_all() return m if mapnik.has_grid_renderer(): def test_clearing_grid_data(): - g = mapnik.Grid(256,256) + g = mapnik.Grid(256, 256) utf = g.encode() # make sure it equals itself - eq_(g.encode(),utf) + assert g.encode() == utf m = make_map() - mapnik.render_layer(m,g,layer=0,fields=['__id__','Name']) - eq_(g.encode()!=utf,True) + mapnik.render_layer(m, g, layer=0, fields=['__id__', 'Name']) + assert g.encode() != utf # clear grid, should now match original g.clear() - eq_(g.encode(),utf) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert g.encode() == utf diff --git a/test/python_tests/cairo_test.py b/test/python_tests/cairo_test.py index 3c626d4a6..a3d324314 100644 --- a/test/python_tests/cairo_test.py +++ b/test/python_tests/cairo_test.py @@ -1,23 +1,24 @@ -#!/usr/bin/env python - import os import shutil import mapnik -from nose.tools import eq_ -from utilities import execution_path, run_all +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield + def make_tmp_map(): - m = mapnik.Map(512,512) + m = 
mapnik.Map(512, 512) m.background_color = mapnik.Color('steelblue') ds = mapnik.MemoryDatasource() context = mapnik.Context() context.push('Name') - f = mapnik.Feature(context,1) + f = mapnik.Feature(context, 1) f['Name'] = 'Hello' f.geometry = mapnik.Geometry.from_wkt('POINT (0 0)') ds.add_feature(f) @@ -30,167 +31,200 @@ def make_tmp_map(): lyr = mapnik.Layer('Layer') lyr.datasource = ds lyr.styles.append('style') - m.append_style('style',s) + m.append_style('style', s) m.layers.append(lyr) return m -def draw_title(m,ctx,text,size=10,color=mapnik.Color('black')): +def draw_title(m, ctx, text, size=10, color=mapnik.Color('black')): """ Draw a Map Title near the top of a page.""" - middle = m.width/2.0 + middle = m.width / 2.0 ctx.set_source_rgba(*cairo_color(color)) - ctx.select_font_face("DejaVu Sans Book", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL) + ctx.select_font_face( + "Helvetica", + cairo.FONT_SLANT_NORMAL, + cairo.FONT_WEIGHT_NORMAL) ctx.set_font_size(size) x_bearing, y_bearing, width, height = ctx.text_extents(text)[:4] ctx.move_to(middle - width / 2 - x_bearing, 20.0 - height / 2 - y_bearing) ctx.show_text(text) -def draw_neatline(m,ctx): - w,h = m.width, m.height + +def draw_neatline(m, ctx): + w, h = m.width, m.height ctx.set_source_rgba(*cairo_color(mapnik.Color('black'))) outline = [ - [0,0],[w,0],[w,h],[0,h] + [0, 0], [w, 0], [w, h], [0, h] ] ctx.set_line_width(1) - for idx,pt in enumerate(outline): + for idx, pt in enumerate(outline): if (idx == 0): - ctx.move_to(*pt) + ctx.move_to(*pt) else: - ctx.line_to(*pt) + ctx.line_to(*pt) ctx.close_path() inset = 6 inline = [ - [inset,inset],[w-inset,inset],[w-inset,h-inset],[inset,h-inset] + [inset, inset], [w - inset, inset], [w - + inset, h - inset], [inset, h - inset] ] - ctx.set_line_width(inset/2) - for idx,pt in enumerate(inline): + ctx.set_line_width(inset / 2) + for idx, pt in enumerate(inline): if (idx == 0): - ctx.move_to(*pt) + ctx.move_to(*pt) else: - ctx.line_to(*pt) + ctx.line_to(*pt) ctx.close_path() ctx.stroke() + def cairo_color(c): """ Return a Cairo color tuple from a Mapnik Color.""" - ctx_c = (c.r/255.0,c.g/255.0,c.b/255.0,c.a/255.0) + ctx_c = (c.r / 255.0, c.g / 255.0, c.b / 255.0, c.a / 255.0) return ctx_c if mapnik.has_pycairo(): import cairo - def test_passing_pycairo_context_svg(): + def test_passing_pycairo_context_svg(setup): m = make_tmp_map() - m.zoom_to_box(mapnik.Box2d(-180,-90,180,90)) + m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90)) test_cairo_file = '/tmp/mapnik-cairo-context-test.svg' surface = cairo.SVGSurface(test_cairo_file, m.width, m.height) - expected_cairo_file = './images/pycairo/cairo-cairo-expected.svg' + expected_cairo_file = 'images/pycairo/cairo-cairo-expected.svg' context = cairo.Context(surface) - mapnik.render(m,context) - draw_title(m,context,"Hello Map",size=20) - draw_neatline(m,context) + mapnik.render(m, context) + draw_title(m, context, "Hello Map", size=20) + draw_neatline(m, context) surface.finish() if not os.path.exists(expected_cairo_file) or os.environ.get('UPDATE'): - print 'generated expected cairo surface file %s' % expected_cairo_file - shutil.copy(test_cairo_file,expected_cairo_file) - diff = abs(os.stat(expected_cairo_file).st_size-os.stat(test_cairo_file).st_size) - msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,test_cairo_file,'tests/python_tests/'+ expected_cairo_file) - eq_( diff < 1500, True, msg) + print('generated expected cairo surface file', expected_cairo_file) + shutil.copy(test_cairo_file, expected_cairo_file) + diff 
= abs( + os.stat(expected_cairo_file).st_size - + os.stat(test_cairo_file).st_size) + msg = 'diff in size (%s) between actual (%s) and expected(%s)' % ( + diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file) + assert diff < 1500, msg os.remove(test_cairo_file) def test_passing_pycairo_context_pdf(): m = make_tmp_map() - m.zoom_to_box(mapnik.Box2d(-180,-90,180,90)) + m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90)) test_cairo_file = '/tmp/mapnik-cairo-context-test.pdf' surface = cairo.PDFSurface(test_cairo_file, m.width, m.height) - expected_cairo_file = './images/pycairo/cairo-cairo-expected.pdf' + expected_cairo_file = 'images/pycairo/cairo-cairo-expected.pdf' context = cairo.Context(surface) - mapnik.render(m,context) - draw_title(m,context,"Hello Map",size=20) - draw_neatline(m,context) + mapnik.render(m, context) + draw_title(m, context, "Hello Map", size=20) + draw_neatline(m, context) surface.finish() if not os.path.exists(expected_cairo_file) or os.environ.get('UPDATE'): - print 'generated expected cairo surface file %s' % expected_cairo_file - shutil.copy(test_cairo_file,expected_cairo_file) - diff = abs(os.stat(expected_cairo_file).st_size-os.stat(test_cairo_file).st_size) - msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,test_cairo_file,'tests/python_tests/'+ expected_cairo_file) - eq_( diff < 1500, True, msg) + print('generated expected cairo surface file', expected_cairo_file) + shutil.copy(test_cairo_file, expected_cairo_file) + diff = abs( + os.stat(expected_cairo_file).st_size - + os.stat(test_cairo_file).st_size) + msg = 'diff in size (%s) between actual (%s) and expected(%s)' % ( + diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file) + assert diff < 1500, msg os.remove(test_cairo_file) def test_passing_pycairo_context_png(): m = make_tmp_map() - m.zoom_to_box(mapnik.Box2d(-180,-90,180,90)) + m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90)) test_cairo_file = '/tmp/mapnik-cairo-context-test.png' surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, m.width, m.height) - expected_cairo_file = './images/pycairo/cairo-cairo-expected.png' - expected_cairo_file2 = './images/pycairo/cairo-cairo-expected-reduced.png' + expected_cairo_file = 'images/pycairo/cairo-cairo-expected.png' + expected_cairo_file2 = 'images/pycairo/cairo-cairo-expected-reduced.png' context = cairo.Context(surface) - mapnik.render(m,context) - draw_title(m,context,"Hello Map",size=20) - draw_neatline(m,context) + mapnik.render(m, context) + draw_title(m, context, "Hello Map", size=20) + draw_neatline(m, context) surface.write_to_png(test_cairo_file) - reduced_color_image = test_cairo_file.replace('png','-mapnik.png') + reduced_color_image = test_cairo_file.replace('png', '-mapnik.png') im = mapnik.Image.from_cairo(surface) - im.save(reduced_color_image,'png8') + im.save(reduced_color_image, 'png8') surface.finish() if not os.path.exists(expected_cairo_file) or os.environ.get('UPDATE'): - print 'generated expected cairo surface file %s' % expected_cairo_file - shutil.copy(test_cairo_file,expected_cairo_file) - diff = abs(os.stat(expected_cairo_file).st_size-os.stat(test_cairo_file).st_size) - msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,test_cairo_file,'tests/python_tests/'+ expected_cairo_file) - eq_( diff < 500, True, msg) + print('generated expected cairo surface file', expected_cairo_file) + shutil.copy(test_cairo_file, expected_cairo_file) + diff = abs( + os.stat(expected_cairo_file).st_size - + os.stat(test_cairo_file).st_size) + msg 
= 'diff in size (%s) between actual (%s) and expected(%s)' % ( + diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file) + assert diff < 500, msg os.remove(test_cairo_file) - if not os.path.exists(expected_cairo_file2) or os.environ.get('UPDATE'): - print 'generated expected cairo surface file %s' % expected_cairo_file2 - shutil.copy(reduced_color_image,expected_cairo_file2) - diff = abs(os.stat(expected_cairo_file2).st_size-os.stat(reduced_color_image).st_size) - msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,reduced_color_image,'tests/python_tests/'+ expected_cairo_file2) - eq_( diff < 500, True, msg) + if not os.path.exists( + expected_cairo_file2) or os.environ.get('UPDATE'): + print( + 'generated expected cairo surface file', + expected_cairo_file2) + shutil.copy(reduced_color_image, expected_cairo_file2) + diff = abs( + os.stat(expected_cairo_file2).st_size - + os.stat(reduced_color_image).st_size) + msg = 'diff in size (%s) between actual (%s) and expected(%s)' % ( + diff, reduced_color_image, 'tests/python_tests/' + expected_cairo_file2) + assert diff < 500, msg os.remove(reduced_color_image) if 'sqlite' in mapnik.DatasourceCache.plugin_names(): - def _pycairo_surface(type,sym): - test_cairo_file = '/tmp/mapnik-cairo-surface-test.%s.%s' % (sym,type) - expected_cairo_file = './images/pycairo/cairo-surface-expected.%s.%s' % (sym,type) - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/%s_symbolizer.xml' % sym) - m.zoom_all() - if hasattr(cairo,'%sSurface' % type.upper()): - surface = getattr(cairo,'%sSurface' % type.upper())(test_cairo_file, m.width,m.height) - mapnik.render(m, surface) - surface.finish() - if not os.path.exists(expected_cairo_file) or os.environ.get('UPDATE'): - print 'generated expected cairo surface file %s' % expected_cairo_file - shutil.copy(test_cairo_file,expected_cairo_file) - diff = abs(os.stat(expected_cairo_file).st_size-os.stat(test_cairo_file).st_size) - msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,test_cairo_file,'tests/python_tests/'+ expected_cairo_file) - if os.uname()[0] == 'Darwin': - eq_( diff < 2100, True, msg) - else: - eq_( diff < 23000, True, msg) - os.remove(test_cairo_file) - return True + def _pycairo_surface(type, sym): + test_cairo_file = '/tmp/mapnik-cairo-surface-test.%s.%s' % ( + sym, type) + expected_cairo_file = 'images/pycairo/cairo-surface-expected.%s.%s' % ( + sym, type) + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/%s_symbolizer.xml' % sym) + m.zoom_all() + if hasattr(cairo, '%sSurface' % type.upper()): + surface = getattr( + cairo, + '%sSurface' % + type.upper())( + test_cairo_file, + m.width, + m.height) + mapnik.render(m, surface) + surface.finish() + if not os.path.exists( + expected_cairo_file) or os.environ.get('UPDATE'): + print( + 'generated expected cairo surface file', + expected_cairo_file) + shutil.copy(test_cairo_file, expected_cairo_file) + diff = abs( + os.stat(expected_cairo_file).st_size - + os.stat(test_cairo_file).st_size) + msg = 'diff in size (%s) between actual (%s) and expected(%s)' % ( + diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file) + if os.uname()[0] == 'Darwin': + assert diff < 2100, msg else: - print 'skipping cairo.%s test since surface is not available' % type.upper() - return True + assert diff < 23000, msg + os.remove(test_cairo_file) + return True + else: + print( + 'skipping cairo.%s test since surface is not available' % + type.upper()) + return True def 
test_pycairo_svg_surface1(): - eq_(_pycairo_surface('svg','point'),True) + assert _pycairo_surface('svg', 'point') def test_pycairo_svg_surface2(): - eq_(_pycairo_surface('svg','building'),True) + assert _pycairo_surface('svg', 'building') def test_pycairo_svg_surface3(): - eq_(_pycairo_surface('svg','polygon'),True) + assert _pycairo_surface('svg', 'polygon') def test_pycairo_pdf_surface1(): - eq_(_pycairo_surface('pdf','point'),True) + assert _pycairo_surface('pdf', 'point') def test_pycairo_pdf_surface2(): - eq_(_pycairo_surface('pdf','building'),True) + assert _pycairo_surface('pdf', 'building') def test_pycairo_pdf_surface3(): - eq_(_pycairo_surface('pdf','polygon'),True) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert _pycairo_surface('pdf', 'polygon') diff --git a/test/python_tests/color_test.py b/test/python_tests/color_test.py index 900faf11b..e8fc90fc6 100644 --- a/test/python_tests/color_test.py +++ b/test/python_tests/color_test.py @@ -1,115 +1,102 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import sys -import os, mapnik -from timeit import Timer, time -from nose.tools import * -from utilities import execution_path, run_all, get_unique_colors - -def setup(): - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) +import os +import mapnik def test_color_init(): c = mapnik.Color(12, 128, 255) - eq_(c.r, 12) - eq_(c.g, 128) - eq_(c.b, 255) - eq_(c.a, 255) - eq_(False, c.get_premultiplied()) + assert c.r == 12 + assert c.g == 128 + assert c.b == 255 + assert c.a == 255 + assert not c.get_premultiplied() c = mapnik.Color(16, 32, 64, 128) - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) - eq_(False, c.get_premultiplied()) - c = mapnik.Color(16, 32, 64, 128,True) - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) - eq_(True, c.get_premultiplied()) + assert c.r == 16 + assert c.g == 32 + assert c.b == 64 + assert c.a == 128 + assert not c.get_premultiplied() + c = mapnik.Color(16, 32, 64, 128, True) + assert c.r == 16 + assert c.g == 32 + assert c.b == 64 + assert c.a == 128 + assert c.get_premultiplied() c = mapnik.Color('rgba(16,32,64,0.5)') - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) - eq_(False, c.get_premultiplied()) + assert c.r == 16 + assert c.g == 32 + assert c.b == 64 + assert c.a == 128 + assert not c.get_premultiplied() c = mapnik.Color('rgba(16,32,64,0.5)', True) - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) - eq_(True, c.get_premultiplied()) + assert c.r == 16 + assert c.g == 32 + assert c.b == 64 + assert c.a == 128 + assert c.get_premultiplied() hex_str = '#10204080' c = mapnik.Color(hex_str) - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) - eq_(hex_str, c.to_hex_string()) - eq_(False, c.get_premultiplied()) + assert c.r == 16 + assert c.g == 32 + assert c.b == 64 + assert c.a == 128 + assert hex_str == c.to_hex_string() + assert not c.get_premultiplied() c = mapnik.Color(hex_str, True) - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) - eq_(hex_str, c.to_hex_string()) - eq_(True, c.get_premultiplied()) + assert c.r == 16 + assert c.g == 32 + assert c.b == 64 + assert c.a == 128 + assert hex_str == c.to_hex_string() + assert c.get_premultiplied() rgba_int = 2151686160 c = mapnik.Color(rgba_int) - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) - eq_(rgba_int, c.packed()) - eq_(False, c.get_premultiplied()) + assert c.r == 16 + 
assert c.g == 32 + assert c.b == 64 + assert c.a == 128 + assert rgba_int == c.packed() + assert not c.get_premultiplied() c = mapnik.Color(rgba_int, True) - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) - eq_(rgba_int, c.packed()) - eq_(True, c.get_premultiplied()) + assert c.r == 16 + assert c.g == 32 + assert c.b == 64 + assert c.a == 128 + assert rgba_int == c.packed() + assert c.get_premultiplied() + def test_color_properties(): c = mapnik.Color(16, 32, 64, 128) - eq_(c.r, 16) - eq_(c.g, 32) - eq_(c.b, 64) - eq_(c.a, 128) + assert c.r == 16 + assert c.g == 32 + assert c.b == 64 + assert c.a == 128 c.r = 17 - eq_(c.r, 17) + assert c.r == 17 c.g = 33 - eq_(c.g, 33) + assert c.g == 33 c.b = 65 - eq_(c.b, 65) + assert c.b == 65 c.a = 128 - eq_(c.a, 128) + assert c.a == 128 + def test_color_premultiply(): c = mapnik.Color(16, 33, 255, 128) - eq_(c.premultiply(), True) - eq_(c.r, 8) - eq_(c.g, 17) - eq_(c.b, 128) - eq_(c.a, 128) + assert c.premultiply() + assert c.r == 8 + assert c.g == 17 + assert c.b == 128 + assert c.a == 128 # Repeating it again should do nothing - eq_(c.premultiply(), False) - eq_(c.r, 8) - eq_(c.g, 17) - eq_(c.b, 128) - eq_(c.a, 128) + assert not c.premultiply() + assert c.r == 8 + assert c.g == 17 + assert c.b == 128 + assert c.a == 128 c.demultiply() c.demultiply() # This will not return the same values as before but we expect that - eq_(c.r,15) - eq_(c.g,33) - eq_(c.b,255) - eq_(c.a,128) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert c.r == 15 + assert c.g == 33 + assert c.b == 255 + assert c.a == 128 diff --git a/test/python_tests/compare_test.py b/test/python_tests/compare_test.py index f4b656309..b66775262 100644 --- a/test/python_tests/compare_test.py +++ b/test/python_tests/compare_test.py @@ -1,112 +1,104 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import os, mapnik -from nose.tools import * -from utilities import execution_path, run_all - -def setup(): - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) +import os +import mapnik def test_another_compare(): - im = mapnik.Image(5,5) - im2 = mapnik.Image(5,5) + im = mapnik.Image(5, 5) + im2 = mapnik.Image(5, 5) im2.fill(mapnik.Color('rgba(255,255,255,0)')) - eq_(im.compare(im2,16), im.width() * im.height()) + assert im.compare(im2, 16) == im.width() * im.height() def test_compare_rgba8(): - im = mapnik.Image(5,5,mapnik.ImageType.rgba8) - im.fill(mapnik.Color(0,0,0,0)) - eq_(im.compare(im), 0) - im2 = mapnik.Image(5,5,mapnik.ImageType.rgba8) - im2.fill(mapnik.Color(0,0,0,0)) - eq_(im.compare(im2), 0) - eq_(im2.compare(im), 0) - im2.fill(mapnik.Color(0,0,0,12)) - eq_(im.compare(im2), 25) - eq_(im.compare(im2, 0, False), 0) - im3 = mapnik.Image(5,5,mapnik.ImageType.rgba8) - im3.set_pixel(0,0, mapnik.Color(0,0,0,0)) - im3.set_pixel(0,1, mapnik.Color(1,1,1,1)) - im3.set_pixel(1,0, mapnik.Color(2,2,2,2)) - im3.set_pixel(1,1, mapnik.Color(3,3,3,3)) - eq_(im.compare(im3), 3) - eq_(im.compare(im3,1),2) - eq_(im.compare(im3,2),1) - eq_(im.compare(im3,3),0) + im = mapnik.Image(5, 5, mapnik.ImageType.rgba8) + im.fill(mapnik.Color(0, 0, 0, 0)) + assert im.compare(im) == 0 + im2 = mapnik.Image(5, 5, mapnik.ImageType.rgba8) + im2.fill(mapnik.Color(0, 0, 0, 0)) + assert im.compare(im2) == 0 + assert im2.compare(im) == 0 + im2.fill(mapnik.Color(0, 0, 0, 12)) + assert im.compare(im2) == 25 + assert im.compare(im2, 0, False) == 0 + im3 = mapnik.Image(5, 5, 
mapnik.ImageType.rgba8) + im3.set_pixel(0, 0, mapnik.Color(0, 0, 0, 0)) + im3.set_pixel(0, 1, mapnik.Color(1, 1, 1, 1)) + im3.set_pixel(1, 0, mapnik.Color(2, 2, 2, 2)) + im3.set_pixel(1, 1, mapnik.Color(3, 3, 3, 3)) + assert im.compare(im3) == 3 + assert im.compare(im3, 1) == 2 + assert im.compare(im3, 2) == 1 + assert im.compare(im3, 3) == 0 + def test_compare_2_image(): - im = mapnik.Image(5,5) - im.set_pixel(0,0, mapnik.Color(254, 254, 254, 254)) - im.set_pixel(4,4, mapnik.Color('white')) - im2 = mapnik.Image(5,5) - eq_(im2.compare(im,16), 2) + im = mapnik.Image(5, 5) + im.set_pixel(0, 0, mapnik.Color(254, 254, 254, 254)) + im.set_pixel(4, 4, mapnik.Color('white')) + im2 = mapnik.Image(5, 5) + assert im2.compare(im, 16) == 2 + def test_compare_dimensions(): - im = mapnik.Image(2,2) - im2 = mapnik.Image(3,3) - eq_(im.compare(im2), 4) - eq_(im2.compare(im), 9) + im = mapnik.Image(2, 2) + im2 = mapnik.Image(3, 3) + assert im.compare(im2) == 4 + assert im2.compare(im) == 9 + def test_compare_gray8(): - im = mapnik.Image(2,2,mapnik.ImageType.gray8) + im = mapnik.Image(2, 2, mapnik.ImageType.gray8) im.fill(0) - eq_(im.compare(im), 0) - im2 = mapnik.Image(2,2,mapnik.ImageType.gray8) + assert im.compare(im) == 0 + im2 = mapnik.Image(2, 2, mapnik.ImageType.gray8) im2.fill(0) - eq_(im.compare(im2), 0) - eq_(im2.compare(im), 0) - eq_(im.compare(im2, 0, False), 0) - im3 = mapnik.Image(2,2,mapnik.ImageType.gray8) - im3.set_pixel(0,0,0) - im3.set_pixel(0,1,1) - im3.set_pixel(1,0,2) - im3.set_pixel(1,1,3) - eq_(im.compare(im3),3) - eq_(im.compare(im3,1),2) - eq_(im.compare(im3,2),1) - eq_(im.compare(im3,3),0) + assert im.compare(im2) == 0 + assert im2.compare(im) == 0 + assert im.compare(im2, 0, False) == 0 + im3 = mapnik.Image(2, 2, mapnik.ImageType.gray8) + im3.set_pixel(0, 0, 0) + im3.set_pixel(0, 1, 1) + im3.set_pixel(1, 0, 2) + im3.set_pixel(1, 1, 3) + assert im.compare(im3) == 3 + assert im.compare(im3, 1) == 2 + assert im.compare(im3, 2) == 1 + assert im.compare(im3, 3) == 0 + def test_compare_gray16(): - im = mapnik.Image(2,2,mapnik.ImageType.gray16) + im = mapnik.Image(2, 2, mapnik.ImageType.gray16) im.fill(0) - eq_(im.compare(im), 0) - im2 = mapnik.Image(2,2,mapnik.ImageType.gray16) + assert im.compare(im) == 0 + im2 = mapnik.Image(2, 2, mapnik.ImageType.gray16) im2.fill(0) - eq_(im.compare(im2), 0) - eq_(im2.compare(im), 0) - eq_(im.compare(im2, 0, False), 0) - im3 = mapnik.Image(2,2,mapnik.ImageType.gray16) - im3.set_pixel(0,0,0) - im3.set_pixel(0,1,1) - im3.set_pixel(1,0,2) - im3.set_pixel(1,1,3) - eq_(im.compare(im3),3) - eq_(im.compare(im3,1),2) - eq_(im.compare(im3,2),1) - eq_(im.compare(im3,3),0) + assert im.compare(im2) == 0 + assert im2.compare(im) == 0 + assert im.compare(im2, 0, False) == 0 + im3 = mapnik.Image(2, 2, mapnik.ImageType.gray16) + im3.set_pixel(0, 0, 0) + im3.set_pixel(0, 1, 1) + im3.set_pixel(1, 0, 2) + im3.set_pixel(1, 1, 3) + assert im.compare(im3) == 3 + assert im.compare(im3, 1) == 2 + assert im.compare(im3, 2) == 1 + assert im.compare(im3, 3) == 0 + def test_compare_gray32f(): - im = mapnik.Image(2,2,mapnik.ImageType.gray32f) + im = mapnik.Image(2, 2, mapnik.ImageType.gray32f) im.fill(0.5) - eq_(im.compare(im), 0) - im2 = mapnik.Image(2,2,mapnik.ImageType.gray32f) + assert im.compare(im) == 0 + im2 = mapnik.Image(2, 2, mapnik.ImageType.gray32f) im2.fill(0.5) - eq_(im.compare(im2), 0) - eq_(im2.compare(im), 0) - eq_(im.compare(im2, 0, False), 0) - im3 = mapnik.Image(2,2,mapnik.ImageType.gray32f) - im3.set_pixel(0,0,0.5) - im3.set_pixel(0,1,1.5) - 
im3.set_pixel(1,0,2.5) - im3.set_pixel(1,1,3.5) - eq_(im.compare(im3),3) - eq_(im.compare(im3,1.0),2) - eq_(im.compare(im3,2.0),1) - eq_(im.compare(im3,3.0),0) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert im.compare(im2) == 0 + assert im2.compare(im) == 0 + assert im.compare(im2, 0, False) == 0 + im3 = mapnik.Image(2, 2, mapnik.ImageType.gray32f) + im3.set_pixel(0, 0, 0.5) + im3.set_pixel(0, 1, 1.5) + im3.set_pixel(1, 0, 2.5) + im3.set_pixel(1, 1, 3.5) + assert im.compare(im3) == 3 + assert im.compare(im3, 1.0) == 2 + assert im.compare(im3, 2.0) == 1 + assert im.compare(im3, 3.0) == 0 diff --git a/test/python_tests/compositing_test.py b/test/python_tests/compositing_test.py index a0c8255ed..46ec373a5 100644 --- a/test/python_tests/compositing_test.py +++ b/test/python_tests/compositing_test.py @@ -1,25 +1,28 @@ -#encoding: utf8 - -from nose.tools import eq_ import os -from utilities import execution_path, run_all -from utilities import get_unique_colors, pixel2channels, side_by_side_image import mapnik +import pytest +from .utilities import (get_unique_colors, pixel2channels, side_by_side_image, execution_path) +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield + +def is_pre(color, alpha): + return (color * 255.0 / alpha) <= 255 -def is_pre(color,alpha): - return (color*255.0/alpha) <= 255 -def debug_image(image,step=2): - for x in range(0,image.width(),step): - for y in range(0,image.height(),step): - pixel = image.get_pixel(x,y) - red,green,blue,alpha = pixel2channels(pixel) - print "rgba(%s,%s,%s,%s) at %s,%s" % (red,green,blue,alpha,x,y) +def debug_image(image, step=2): + for x in range(0, image.width(), step): + for y in range(0, image.height(), step): + pixel = image.get_pixel(x, y) + red, green, blue, alpha = pixel2channels(pixel) + print( + "rgba(%s,%s,%s,%s) at %s,%s" % + (red, green, blue, alpha, x, y)) + def replace_style(m, name, style): m.remove_style(name) @@ -27,127 +30,147 @@ def replace_style(m, name, style): # note: it is impossible to know for all pixel colors # we can only detect likely cases of non premultiplied colors + + def validate_pixels_are_not_premultiplied(image): over_alpha = False transparent = True fully_opaque = True - for x in range(0,image.width(),2): - for y in range(0,image.height(),2): - pixel = image.get_pixel(x,y) - red,green,blue,alpha = pixel2channels(pixel) + for x in range(0, image.width(), 2): + for y in range(0, image.height(), 2): + pixel = image.get_pixel(x, y) + red, green, blue, alpha = pixel2channels(pixel) if alpha > 0: transparent = False if alpha < 255: fully_opaque = False - color_max = max(red,green,blue) + color_max = max(red, green, blue) if color_max > alpha: over_alpha = True return over_alpha or transparent or fully_opaque + def validate_pixels_are_not_premultiplied2(image): looks_not_multiplied = False - for x in range(0,image.width(),2): - for y in range(0,image.height(),2): - pixel = image.get_pixel(x,y) - red,green,blue,alpha = pixel2channels(pixel) - #each value of the color channels will never be bigger than that of the alpha channel. + for x in range(0, image.width(), 2): + for y in range(0, image.height(), 2): + pixel = image.get_pixel(x, y) + red, green, blue, alpha = pixel2channels(pixel) + # each value of the color channels will never be bigger than that + # of the alpha channel. 
if alpha > 0: if red > 0 and red > alpha: - print 'red: %s, a: %s' % (red,alpha) + print('red: %s, a: %s' % (red, alpha)) looks_not_multiplied = True return looks_not_multiplied + def validate_pixels_are_premultiplied(image): bad_pixels = [] - for x in range(0,image.width(),2): - for y in range(0,image.height(),2): - pixel = image.get_pixel(x,y) - red,green,blue,alpha = pixel2channels(pixel) + for x in range(0, image.width(), 2): + for y in range(0, image.height(), 2): + pixel = image.get_pixel(x, y) + red, green, blue, alpha = pixel2channels(pixel) if alpha > 0: - pixel = image.get_pixel(x,y) - is_valid = ((0 <= red <= alpha) and is_pre(red,alpha)) \ - and ((0 <= green <= alpha) and is_pre(green,alpha)) \ - and ((0 <= blue <= alpha) and is_pre(blue,alpha)) \ - and (alpha >= 0 and alpha <= 255) + pixel = image.get_pixel(x, y) + is_valid = ((0 <= red <= alpha) and is_pre(red, alpha)) \ + and ((0 <= green <= alpha) and is_pre(green, alpha)) \ + and ((0 <= blue <= alpha) and is_pre(blue, alpha)) \ + and (alpha >= 0 and alpha <= 255) if not is_valid: - bad_pixels.append("rgba(%s,%s,%s,%s) at %s,%s" % (red,green,blue,alpha,x,y)) + bad_pixels.append( + "rgba(%s,%s,%s,%s) at %s,%s" % + (red, green, blue, alpha, x, y)) num_bad = len(bad_pixels) - return (num_bad == 0,bad_pixels) + return (num_bad == 0, bad_pixels) + -def test_compare_images(): - b = mapnik.Image.open('./images/support/b.png') +def test_compare_images(setup): + b = mapnik.Image.open('images/support/b.png') b.premultiply() - num_ops = len(mapnik.CompositeOp.names) + num_ops = len(mapnik.CompositeOp.__members__) successes = [] fails = [] - for name in mapnik.CompositeOp.names: - a = mapnik.Image.open('./images/support/a.png') + for name in mapnik.CompositeOp.__members__.keys(): + a = mapnik.Image.open('images/support/a.png') a.premultiply() - a.composite(b,getattr(mapnik.CompositeOp,name)) + a.composite(b, getattr(mapnik.CompositeOp, name)) actual = '/tmp/mapnik-comp-op-test-' + name + '.png' expected = 'images/composited/' + name + '.png' valid = validate_pixels_are_premultiplied(a) if not valid[0]: - fails.append('%s not validly premultiplied!:\n\t %s pixels (%s)' % (name,len(valid[1]),valid[1][0])) + fails.append( + '%s not validly premultiplied!:\n\t %s pixels (%s)' % + (name, len( + valid[1]), valid[1][0])) a.demultiply() if not validate_pixels_are_not_premultiplied(a): fails.append('%s not validly demultiplied' % (name)) - a.save(actual,'png32') + a.save(actual, 'png32') if not os.path.exists(expected) or os.environ.get('UPDATE'): - print 'generating expected test image: %s' % expected - a.save(expected,'png32') + print('generating expected test image: %s' % expected) + a.save(expected, 'png32') expected_im = mapnik.Image.open(expected) # compare them - if a.tostring('png32') == expected_im.tostring('png32'): + if a.to_string('png32') == expected_im.to_string('png32'): successes.append(name) else: - fails.append('failed comparing actual (%s) and expected(%s)' % (actual,'tests/python_tests/'+ expected)) + fails.append( + 'failed comparing actual (%s) and expected(%s)' % + (actual, 'tests/python_tests/' + expected)) fail_im = side_by_side_image(expected_im, a) - fail_im.save('/tmp/mapnik-comp-op-test-' + name + '.fail.png','png32') - eq_(len(successes),num_ops,'\n'+'\n'.join(fails)) + fail_im.save( + '/tmp/mapnik-comp-op-test-' + + name + + '.fail.png', + 'png32') + assert len(successes) == num_ops, '\n' + '\n'.join(fails) b.demultiply() # b will be slightly modified by pre and then de multiplication rounding errors # TODO - 
write test to ensure the image is 99% the same. #expected_b = mapnik.Image.open('./images/support/b.png') - #b.save('/tmp/mapnik-comp-op-test-original-mask.png') - #eq_(b.tostring('png32'),expected_b.tostring('png32'), '/tmp/mapnik-comp-op-test-original-mask.png is no longer equivalent to original mask: ./images/support/b.png') + # b.save('/tmp/mapnik-comp-op-test-original-mask.png') + #assert b.to_string('png32') == expected_b.to_string('png32'), '/tmp/mapnik-comp-op-test-original-mask.png is no longer equivalent to original mask: ./images/support/b.png' + def test_pre_multiply_status(): - b = mapnik.Image.open('./images/support/b.png') + b = mapnik.Image.open('images/support/b.png') # not premultiplied yet, should appear that way result = validate_pixels_are_not_premultiplied(b) - eq_(result,True) + assert result # not yet premultiplied therefore should return false result = validate_pixels_are_premultiplied(b) - eq_(result[0],False) + assert not result[0] # now actually premultiply the pixels b.premultiply() # now checking if premultiplied should succeed result = validate_pixels_are_premultiplied(b) - eq_(result[0],True) + assert result[0] # should now not appear to look not premultiplied result = validate_pixels_are_not_premultiplied(b) - eq_(result,False) + assert not result # now actually demultiply the pixels b.demultiply() # should now appear demultiplied result = validate_pixels_are_not_premultiplied(b) - eq_(result,True) + assert result + def test_pre_multiply_status_of_map1(): - m = mapnik.Map(256,256) - im = mapnik.Image(m.width,m.height) - eq_(validate_pixels_are_not_premultiplied(im),True) - mapnik.render(m,im) - eq_(validate_pixels_are_not_premultiplied(im),True) + m = mapnik.Map(256, 256) + im = mapnik.Image(m.width, m.height) + assert validate_pixels_are_not_premultiplied(im) + mapnik.render(m, im) + assert validate_pixels_are_not_premultiplied(im) + def test_pre_multiply_status_of_map2(): - m = mapnik.Map(256,256) - m.background = mapnik.Color(1,1,1,255) - im = mapnik.Image(m.width,m.height) - eq_(validate_pixels_are_not_premultiplied(im),True) - mapnik.render(m,im) - eq_(validate_pixels_are_not_premultiplied(im),True) + m = mapnik.Map(256, 256) + m.background = mapnik.Color(1, 1, 1, 255) + im = mapnik.Image(m.width, m.height) + assert validate_pixels_are_not_premultiplied(im) + mapnik.render(m, im) + assert validate_pixels_are_not_premultiplied(im) if 'shape' in mapnik.DatasourceCache.plugin_names(): def test_style_level_comp_op(): @@ -156,7 +179,8 @@ def test_style_level_comp_op(): m.zoom_all() successes = [] fails = [] - for name in mapnik.CompositeOp.names: + + for name in mapnik.CompositeOp.__members__.keys(): # find_style returns a copy of the style object style_markers = m.find_style("markers") style_markers.comp_op = getattr(mapnik.CompositeOp, name) @@ -166,84 +190,95 @@ def test_style_level_comp_op(): mapnik.render(m, im) actual = '/tmp/mapnik-style-comp-op-' + name + '.png' expected = 'images/style-comp-op/' + name + '.png' - im.save(actual,'png32') + im.save(actual, 'png32') if not os.path.exists(expected) or os.environ.get('UPDATE'): - print 'generating expected test image: %s' % expected - im.save(expected,'png32') + print('generating expected test image: %s' % expected) + im.save(expected, 'png32') expected_im = mapnik.Image.open(expected) # compare them - if im.tostring('png32') == expected_im.tostring('png32'): + if im.to_string('png32') == expected_im.to_string('png32'): successes.append(name) else: - fails.append('failed comparing actual (%s) and 
expected(%s)' % (actual,'tests/python_tests/'+ expected)) + fails.append( + 'failed comparing actual (%s) and expected(%s)' % + (actual, 'tests/python_tests/' + expected)) fail_im = side_by_side_image(expected_im, im) - fail_im.save('/tmp/mapnik-style-comp-op-' + name + '.fail.png','png32') - eq_(len(fails), 0, '\n'+'\n'.join(fails)) + fail_im.save( + '/tmp/mapnik-style-comp-op-' + + name + + '.fail.png', + 'png32') + assert len(fails) == 0, '\n' + '\n'.join(fails) def test_style_level_opacity(): - m = mapnik.Map(512,512) - mapnik.load_map(m,'../data/good_maps/style_level_opacity_and_blur.xml') + m = mapnik.Map(512, 512) + mapnik.load_map( + m, '../data/good_maps/style_level_opacity_and_blur.xml') m.zoom_all() - im = mapnik.Image(512,512) - mapnik.render(m,im) + im = mapnik.Image(512, 512) + mapnik.render(m, im) actual = '/tmp/mapnik-style-level-opacity.png' expected = 'images/support/mapnik-style-level-opacity.png' - im.save(actual,'png32') + im.save(actual, 'png32') expected_im = mapnik.Image.open(expected) - eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected)) + assert im.to_string('png32') == expected_im.to_string('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual, + 'tests/python_tests/' + expected) + def test_rounding_and_color_expectations(): - m = mapnik.Map(1,1) + m = mapnik.Map(1, 1) m.background = mapnik.Color('rgba(255,255,255,.4999999)') - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) - eq_(get_unique_colors(im),['rgba(255,255,255,127)']) - m = mapnik.Map(1,1) + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) + assert get_unique_colors(im) == ['rgba(255,255,255,127)'] + m = mapnik.Map(1, 1) m.background = mapnik.Color('rgba(255,255,255,.5)') - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) - eq_(get_unique_colors(im),['rgba(255,255,255,128)']) + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) + assert get_unique_colors(im) == ['rgba(255,255,255,128)'] im_file = mapnik.Image.open('../data/images/stripes_pattern.png') - eq_(get_unique_colors(im_file),['rgba(0,0,0,0)', 'rgba(74,74,74,255)']) + assert get_unique_colors(im_file) == ['rgba(0,0,0,0)', 'rgba(74,74,74,255)'] # should have no effect im_file.premultiply() - eq_(get_unique_colors(im_file),['rgba(0,0,0,0)', 'rgba(74,74,74,255)']) + assert get_unique_colors(im_file) == ['rgba(0,0,0,0)', 'rgba(74,74,74,255)'] im_file.apply_opacity(.5) # should have effect now that image has transparency im_file.premultiply() - eq_(get_unique_colors(im_file),['rgba(0,0,0,0)', 'rgba(37,37,37,127)']) + assert get_unique_colors(im_file) == ['rgba(0,0,0,0)', 'rgba(37,37,37,127)'] # should restore to original nonpremultiplied colors im_file.demultiply() - eq_(get_unique_colors(im_file),['rgba(0,0,0,0)', 'rgba(74,74,74,127)']) + assert get_unique_colors(im_file) == ['rgba(0,0,0,0)', 'rgba(74,74,74,127)'] def test_background_image_and_background_color(): - m = mapnik.Map(8,8) + m = mapnik.Map(8, 8) m.background = mapnik.Color('rgba(255,255,255,.5)') m.background_image = '../data/images/stripes_pattern.png' - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) - eq_(get_unique_colors(im),['rgba(255,255,255,128)', 'rgba(74,74,74,255)']) + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) + assert get_unique_colors(im) == ['rgba(255,255,255,128)', 'rgba(74,74,74,255)'] + def test_background_image_with_alpha_and_background_color(): - m = mapnik.Map(10,10) + m = mapnik.Map(10, 10) 
m.background = mapnik.Color('rgba(255,255,255,.5)') m.background_image = '../data/images/yellow_half_trans.png' - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) - eq_(get_unique_colors(im),['rgba(255,255,85,191)']) + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) + assert get_unique_colors(im) == ['rgba(255,255,85,191)'] + def test_background_image_with_alpha_and_background_color_against_composited_control(): - m = mapnik.Map(10,10) + m = mapnik.Map(10, 10) m.background = mapnik.Color('rgba(255,255,255,.5)') m.background_image = '../data/images/yellow_half_trans.png' - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) # create and composite the expected result - im1 = mapnik.Image(10,10) + im1 = mapnik.Image(10, 10) im1.fill(mapnik.Color('rgba(255,255,255,.5)')) im1.premultiply() - im2 = mapnik.Image(10,10) + im2 = mapnik.Image(10, 10) im2.fill(mapnik.Color('rgba(255,255,0,.5)')) im2.premultiply() im1.composite(im2) @@ -251,8 +286,4 @@ def test_background_image_with_alpha_and_background_color_against_composited_con # compare image rendered (compositing in `agg_renderer::setup`) # vs image composited via python bindings #raise Todo("looks like we need to investigate PNG color rounding when saving") - #eq_(get_unique_colors(im),get_unique_colors(im1)) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + #assert get_unique_colors(im) == get_unique_colors(im1) diff --git a/test/python_tests/copy_test.py b/test/python_tests/copy_test.py index d3cf9b15c..d08a21d0a 100644 --- a/test/python_tests/copy_test.py +++ b/test/python_tests/copy_test.py @@ -1,93 +1,84 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import os, mapnik -from nose.tools import * -from utilities import execution_path, run_all - -def setup(): - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) +import mapnik def test_image_16_8_simple(): - im = mapnik.Image(2,2,mapnik.ImageType.gray16) - im.set_pixel(0,0, 256) - im.set_pixel(0,1, 999) - im.set_pixel(1,0, 5) - im.set_pixel(1,1, 2) + im = mapnik.Image(2, 2, mapnik.ImageType.gray16) + im.set_pixel(0, 0, 256) + im.set_pixel(0, 1, 999) + im.set_pixel(1, 0, 5) + im.set_pixel(1, 1, 2) im2 = im.copy(mapnik.ImageType.gray8) - eq_(im2.get_pixel(0,0), 255) - eq_(im2.get_pixel(0,1), 255) - eq_(im2.get_pixel(1,0), 5) - eq_(im2.get_pixel(1,1), 2) + assert im2.get_pixel(0, 0) == 255 + assert im2.get_pixel(0, 1) == 255 + assert im2.get_pixel(1, 0) == 5 + assert im2.get_pixel(1, 1) == 2 # Cast back! im = im2.copy(mapnik.ImageType.gray16) - eq_(im.get_pixel(0,0), 255) - eq_(im.get_pixel(0,1), 255) - eq_(im.get_pixel(1,0), 5) - eq_(im.get_pixel(1,1), 2) + assert im.get_pixel(0, 0) == 255 + assert im.get_pixel(0, 1) == 255 + assert im.get_pixel(1, 0) == 5 + assert im.get_pixel(1, 1) == 2 + def test_image_32f_8_simple(): - im = mapnik.Image(2,2,mapnik.ImageType.gray32f) - im.set_pixel(0,0, 120.1234) - im.set_pixel(0,1, -23.4) - im.set_pixel(1,0, 120.6) - im.set_pixel(1,1, 360.2) + im = mapnik.Image(2, 2, mapnik.ImageType.gray32f) + im.set_pixel(0, 0, 120.1234) + im.set_pixel(0, 1, -23.4) + im.set_pixel(1, 0, 120.6) + im.set_pixel(1, 1, 360.2) im2 = im.copy(mapnik.ImageType.gray8) - eq_(im2.get_pixel(0,0), 120) - eq_(im2.get_pixel(0,1), 0) - eq_(im2.get_pixel(1,0), 120) # Notice this is truncated! 
- eq_(im2.get_pixel(1,1), 255) + assert im2.get_pixel(0, 0) == 120 + assert im2.get_pixel(0, 1) == 0 + assert im2.get_pixel(1, 0) == 120 # Notice this is truncated! + assert im2.get_pixel(1, 1) == 255 + def test_image_offset_and_scale(): - im = mapnik.Image(2,2,mapnik.ImageType.gray16) - eq_(im.offset, 0.0) - eq_(im.scaling, 1.0) + im = mapnik.Image(2, 2, mapnik.ImageType.gray16) + assert im.offset == 0.0 + assert im.scaling == 1.0 im.offset = 1.0 im.scaling = 2.0 - eq_(im.offset, 1.0) - eq_(im.scaling, 2.0) + assert im.offset == 1.0 + assert im.scaling == 2.0 + def test_image_16_8_scale_and_offset(): - im = mapnik.Image(2,2,mapnik.ImageType.gray16) - im.set_pixel(0,0, 256) - im.set_pixel(0,1, 258) - im.set_pixel(1,0, 99999) - im.set_pixel(1,1, 615) + im = mapnik.Image(2, 2, mapnik.ImageType.gray16) + im.set_pixel(0, 0, 256) + im.set_pixel(0, 1, 258) + im.set_pixel(1, 0, 99999) + im.set_pixel(1, 1, 615) offset = 255 scaling = 3 im2 = im.copy(mapnik.ImageType.gray8, offset, scaling) - eq_(im2.get_pixel(0,0), 0) - eq_(im2.get_pixel(0,1), 1) - eq_(im2.get_pixel(1,0), 255) - eq_(im2.get_pixel(1,1), 120) + assert im2.get_pixel(0, 0) == 0 + assert im2.get_pixel(0, 1) == 1 + assert im2.get_pixel(1, 0) == 255 + assert im2.get_pixel(1, 1) == 120 # pixels will be a little off due to offsets in reverting! im3 = im2.copy(mapnik.ImageType.gray16) - eq_(im3.get_pixel(0,0), 255) # Rounding error with ints - eq_(im3.get_pixel(0,1), 258) # same - eq_(im3.get_pixel(1,0), 1020) # The other one was way out of range for our scale/offset - eq_(im3.get_pixel(1,1), 615) # same + assert im3.get_pixel(0, 0) == 255 # Rounding error with ints + assert im3.get_pixel(0, 1) == 258 # same + # The other one was way out of range for our scale/offset + assert im3.get_pixel(1, 0) == 1020 + assert im3.get_pixel(1, 1) == 615 # same + def test_image_16_32f_scale_and_offset(): - im = mapnik.Image(2,2,mapnik.ImageType.gray16) - im.set_pixel(0,0, 256) - im.set_pixel(0,1, 258) - im.set_pixel(1,0, 0) - im.set_pixel(1,1, 615) + im = mapnik.Image(2, 2, mapnik.ImageType.gray16) + im.set_pixel(0, 0, 256) + im.set_pixel(0, 1, 258) + im.set_pixel(1, 0, 0) + im.set_pixel(1, 1, 615) offset = 255 scaling = 3.2 im2 = im.copy(mapnik.ImageType.gray32f, offset, scaling) - eq_(im2.get_pixel(0,0), 0.3125) - eq_(im2.get_pixel(0,1), 0.9375) - eq_(im2.get_pixel(1,0), -79.6875) - eq_(im2.get_pixel(1,1), 112.5) + assert im2.get_pixel(0, 0) == 0.3125 + assert im2.get_pixel(0, 1) == 0.9375 + assert im2.get_pixel(1, 0) == -79.6875 + assert im2.get_pixel(1, 1) == 112.5 im3 = im2.copy(mapnik.ImageType.gray16) - eq_(im3.get_pixel(0,0), 256) - eq_(im3.get_pixel(0,1), 258) - eq_(im3.get_pixel(1,0), 0) - eq_(im3.get_pixel(1,1), 615) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert im3.get_pixel(0, 0) == 256 + assert im3.get_pixel(0, 1) == 258 + assert im3.get_pixel(1, 0) == 0 + assert im3.get_pixel(1, 1) == 615 diff --git a/test/python_tests/csv_test.py b/test/python_tests/csv_test.py index 5011f5777..fdbff69ad 100644 --- a/test/python_tests/csv_test.py +++ b/test/python_tests/csv_test.py @@ -1,30 +1,23 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - import glob -from nose.tools import eq_,raises -from utilities import execution_path - -import os, mapnik - -default_logging_severity = mapnik.logger.get_severity() +import os +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): - # make the tests silent since we intentially test error 
conditions that are noisy - mapnik.logger.set_severity(mapnik.severity_type.None) # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) - -def teardown(): - mapnik.logger.set_severity(default_logging_severity) + yield if 'csv' in mapnik.DatasourceCache.plugin_names(): def get_csv_ds(filename): - return mapnik.Datasource(type='csv',file=os.path.join('../data/csv/',filename)) + return mapnik.Datasource( + type='csv', file=os.path.join('../data/csv/', filename)) - def test_broken_files(visual=False): + def test_broken_files(setup, visual=False): broken = glob.glob("../data/csv/fails/*.*") broken.extend(glob.glob("../data/csv/warns/*.*")) @@ -34,232 +27,256 @@ def test_broken_files(visual=False): for csv in broken: if visual: try: - mapnik.Datasource(type='csv',file=csv,strict=True) - print '\x1b[33mfailed: should have thrown\x1b[0m',csv + mapnik.Datasource(type='csv', file=csv, strict=True) + print('\x1b[33mfailed: should have thrown\x1b[0m', csv) except Exception: - print '\x1b[1;32m✓ \x1b[0m', csv + print('\x1b[1;32m✓ \x1b[0m', csv) - def test_good_files(visual=False): + def test_good_files(setup, visual=False): good_files = glob.glob("../data/csv/*.*") good_files.extend(glob.glob("../data/csv/warns/*.*")) - ignorable = os.path.join('..','data','csv','long_lat.vrt') + ignorable = os.path.join('..', 'data', 'csv', 'long_lat.vrt') + print("ignorable:", ignorable) good_files.remove(ignorable) - + for f in good_files: + if f.endswith('.index'): + good_files.remove(f) for csv in good_files: if visual: try: - mapnik.Datasource(type='csv',file=csv) - print '\x1b[1;32m✓ \x1b[0m', csv - except Exception, e: - print '\x1b[33mfailed: should not have thrown\x1b[0m',csv,str(e) + mapnik.Datasource(type='csv', file=csv) + print('\x1b[1;32m✓ \x1b[0m', csv) + except Exception as e: + print( + '\x1b[33mfailed: should not have thrown\x1b[0m', + csv, + str(e)) def test_lon_lat_detection(**kwargs): ds = get_csv_ds('lon_lat.csv') - eq_(len(ds.fields()),2) - eq_(ds.fields(),['lon','lat']) - eq_(ds.field_types(),['int','int']) + assert len(ds.fields()) == 2 + assert ds.fields() == ['lon', 'lat'] + assert ds.field_types() == ['int', 'int'] query = mapnik.Query(ds.envelope()) for fld in ds.fields(): query.add_property_name(fld) fs = ds.features(query) desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - feat = fs.next() + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + feat = next(fs) attr = {'lon': 0, 'lat': 0} - eq_(feat.attributes,attr) + assert feat.attributes == attr def test_lng_lat_detection(**kwargs): ds = get_csv_ds('lng_lat.csv') - eq_(len(ds.fields()),2) - eq_(ds.fields(),['lng','lat']) - eq_(ds.field_types(),['int','int']) + assert len(ds.fields()) == 2 + assert ds.fields() == ['lng', 'lat'] + assert ds.field_types() == ['int', 'int'] query = mapnik.Query(ds.envelope()) for fld in ds.fields(): query.add_property_name(fld) fs = ds.features(query) desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - feat = fs.next() + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + feat = next(fs) attr = {'lng': 0, 'lat': 0} - eq_(feat.attributes,attr) def test_type_detection(**kwargs): ds = get_csv_ds('nypd.csv') - eq_(ds.fields(),['Precinct','Phone','Address','City','geo_longitude','geo_latitude','geo_accuracy']) - eq_(ds.field_types(),['str','str','str','str','float','float','str']) - feat = ds.featureset().next() - attr = 
{'City': u'New York, NY', 'geo_accuracy': u'house', 'Phone': u'(212) 334-0711', 'Address': u'19 Elizabeth Street', 'Precinct': u'5th Precinct', 'geo_longitude': -70, 'geo_latitude': 40} - eq_(feat.attributes,attr) - eq_(len(ds.all_features()),2) + assert ds.fields() == ['Precinct', + 'Phone', + 'Address', + 'City', + 'geo_longitude', + 'geo_latitude', + 'geo_accuracy'] + assert ds.field_types() == ['str', 'str', + 'str', 'str', 'float', 'float', 'str'] + feat = next(iter(ds)) + attr = { + 'City': u'New York, NY', + 'geo_accuracy': u'house', + 'Phone': u'(212) 334-0711', + 'Address': u'19 Elizabeth Street', + 'Precinct': u'5th Precinct', + 'geo_longitude': -70, + 'geo_latitude': 40} + assert feat.attributes == attr + assert len(list(iter(ds))) == 2 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_skipping_blank_rows(**kwargs): ds = get_csv_ds('blank_rows.csv') - eq_(ds.fields(),['x','y','name']) - eq_(ds.field_types(),['int','int','str']) - eq_(len(ds.all_features()),2) + assert ds.fields(), ['x', 'y' == 'name'] + assert ds.field_types(), ['int', 'int' == 'str'] + assert len(list(iter(ds))) == 2 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_empty_rows(**kwargs): ds = get_csv_ds('empty_rows.csv') - eq_(len(ds.fields()),10) - eq_(len(ds.field_types()),10) - eq_(ds.fields(),['x', 'y', 'text', 'date', 'integer', 'boolean', 'float', 'time', 'datetime', 'empty_column']) - eq_(ds.field_types(),['int', 'int', 'str', 'str', 'int', 'bool', 'float', 'str', 'str', 'str']) - fs = ds.featureset() - attr = {'x': 0, 'empty_column': u'', 'text': u'a b', 'float': 1.0, 'datetime': u'1971-01-01T04:14:00', 'y': 0, 'boolean': True, 'time': u'04:14:00', 'date': u'1971-01-01', 'integer': 40} + assert len(ds.fields()) == 10 + assert len(ds.field_types()) == 10 + assert ds.fields() == ['x', 'y', 'text', 'date', 'integer', + 'boolean', 'float', 'time', 'datetime', 'empty_column'] + assert ds.field_types() == ['int', 'int', 'str', 'str', + 'int', 'bool', 'float', 'str', 'str', 'str'] + fs = iter(ds) + attr = { + 'x': 0, + 'empty_column': u'', + 'text': u'a b', + 'float': 1.0, + 'datetime': u'1971-01-01T04:14:00', + 'y': 0, + 'boolean': True, + 'time': u'04:14:00', + 'date': u'1971-01-01', + 'integer': 40} first = True for feat in fs: if first: - first=False - eq_(feat.attributes,attr) - eq_(len(feat),10) - eq_(feat['empty_column'],u'') + first = False + assert feat.attributes == attr + assert len(feat) == 10 + assert feat['empty_column'] == u'' desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_slashes(**kwargs): ds = get_csv_ds('has_attributes_with_slashes.csv') - 
eq_(len(ds.fields()),3) - fs = ds.all_features() - eq_(fs[0].attributes,{'x':0,'y':0,'name':u'a/a'}) - eq_(fs[1].attributes,{'x':1,'y':4,'name':u'b/b'}) - eq_(fs[2].attributes,{'x':10,'y':2.5,'name':u'c/c'}) + assert len(ds.fields()) == 3 + fs = list(iter(ds)) + assert fs[0].attributes == {'x': 0, 'y': 0, 'name': u'a/a'} + assert fs[1].attributes == {'x': 1, 'y': 4, 'name': u'b/b'} + assert fs[2].attributes == {'x': 10, 'y': 2.5, 'name': u'c/c'} desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_wkt_field(**kwargs): ds = get_csv_ds('wkt.csv') - eq_(len(ds.fields()),1) - eq_(ds.fields(),['type']) - eq_(ds.field_types(),['str']) - fs = ds.all_features() - #eq_(len(fs[0].geometries()),1) - eq_(fs[0].geometry.type(),mapnik.GeometryType.Point) - #eq_(len(fs[1].geometries()),1) - eq_(fs[1].geometry.type(),mapnik.GeometryType.LineString) - #eq_(len(fs[2].geometries()),1) - eq_(fs[2].geometry.type(),mapnik.GeometryType.Polygon) - #eq_(len(fs[3].geometries()),1) # one geometry, two parts - eq_(fs[3].geometry.type(),mapnik.GeometryType.Polygon) - #eq_(len(fs[4].geometries()),4) - eq_(fs[4].geometry.type(),mapnik.GeometryType.MultiPoint) - #eq_(len(fs[5].geometries()),2) - eq_(fs[5].geometry.type(),mapnik.GeometryType.MultiLineString) - #eq_(len(fs[6].geometries()),2) - eq_(fs[6].geometry.type(),mapnik.GeometryType.MultiPolygon) - #eq_(len(fs[7].geometries()),2) - eq_(fs[7].geometry.type(),mapnik.GeometryType.MultiPolygon) + assert len(ds.fields()) == 1 + assert ds.fields() == ['type'] + assert ds.field_types() == ['str'] + fs = list(iter(ds)) + assert fs[0].geometry.type() == mapnik.GeometryType.Point + assert fs[1].geometry.type() == mapnik.GeometryType.LineString + assert fs[2].geometry.type() == mapnik.GeometryType.Polygon + assert fs[3].geometry.type() == mapnik.GeometryType.Polygon + assert fs[4].geometry.type() == mapnik.GeometryType.MultiPoint + assert fs[5].geometry.type() == mapnik.GeometryType.MultiLineString + assert fs[6].geometry.type() == mapnik.GeometryType.MultiPolygon + assert fs[7].geometry.type() == mapnik.GeometryType.MultiPolygon desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Collection) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Collection + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_handling_of_missing_header(**kwargs): ds = get_csv_ds('missing_header.csv') - eq_(len(ds.fields()),6) - eq_(ds.fields(),['one','two','x','y','_4','aftermissing']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['_4'],'missing') + assert len(ds.fields()) == 6 + assert ds.fields() == ['one', 'two', 'x', 'y', '_4', 'aftermissing'] + fs = iter(ds) + feat = next(fs) + assert feat['_4'] == 'missing' desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def 
test_handling_of_headers_that_are_numbers(**kwargs): ds = get_csv_ds('numbers_for_headers.csv') - eq_(len(ds.fields()),5) - eq_(ds.fields(),['x','y','1990','1991','1992']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['1990'],1) - eq_(feat['1991'],2) - eq_(feat['1992'],3) - eq_(mapnik.Expression("[1991]=2").evaluate(feat),True) + assert len(ds.fields()) == 5 + assert ds.fields() == ['x', 'y', '1990', '1991', '1992'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['1990'] == 1 + assert feat['1991'] == 2 + assert feat['1992'] == 3 + assert mapnik.Expression("[1991]=2").evaluate(feat) def test_quoted_numbers(**kwargs): ds = get_csv_ds('points.csv') - eq_(len(ds.fields()),6) - eq_(ds.fields(),['lat','long','name','nr','color','placements']) - fs = ds.all_features() - eq_(fs[0]['placements'],"N,S,E,W,SW,10,5") - eq_(fs[1]['placements'],"N,S,E,W,SW,10,5") - eq_(fs[2]['placements'],"N,S,E,W,SW,10,5") - eq_(fs[3]['placements'],"N,S,E,W,SW,10,5") - eq_(fs[4]['placements'],"N,S,E,W,SW,10,5") + assert len(ds.fields()) == 6 + assert ds.fields(), ['lat', 'long', 'name', 'nr', 'color' == 'placements'] + fs = list(iter(ds)) + assert fs[0]['placements'] == "N,S,E,W,SW,10,5" + assert fs[1]['placements'] == "N,S,E,W,SW,10,5" + assert fs[2]['placements'] == "N,S,E,W,SW,10,5" + assert fs[3]['placements'] == "N,S,E,W,SW,10,5" + assert fs[4]['placements'] == "N,S,E,W,SW,10,5" desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_reading_windows_newlines(**kwargs): ds = get_csv_ds('windows_newlines.csv') - eq_(len(ds.fields()),3) - feats = ds.all_features() - eq_(len(feats),1) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],1) - eq_(feat['y'],10) - eq_(feat['z'],9999.9999) + assert len(ds.fields()) == 3 + feats = list(iter(ds)) + assert len(feats) == 1 + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 1 + assert feat['y'] == 10 + assert feat['z'] == 9999.9999 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_reading_mac_newlines(**kwargs): ds = get_csv_ds('mac_newlines.csv') - eq_(len(ds.fields()),3) - feats = ds.all_features() - eq_(len(feats),1) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],1) - eq_(feat['y'],10) - eq_(feat['z'],9999.9999) + assert len(ds.fields()) == 3 + feats = list(iter(ds)) + assert len(feats) == 1 + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 1 + assert feat['y'] == 10 + assert feat['z'] == 9999.9999 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def check_newlines(filename): ds = get_csv_ds(filename) - eq_(len(ds.fields()),3) - 
feats = ds.all_features() - eq_(len(feats),1) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['line'],'many\n lines\n of text\n with unix newlines') + assert len(ds.fields()) == 3 + feats = list(iter(ds)) + assert len(feats) == 1 + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['line'] == 'many\n lines\n of text\n with unix newlines' desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_mixed_mac_unix_newlines(**kwargs): check_newlines('mac_newlines_with_unix_inline.csv') @@ -268,11 +285,11 @@ def test_mixed_mac_unix_newlines_escaped(**kwargs): check_newlines('mac_newlines_with_unix_inline_escaped.csv') # To hard to support this case - #def test_mixed_unix_windows_newlines(**kwargs): + # def test_mixed_unix_windows_newlines(**kwargs): # check_newlines('unix_newlines_with_windows_inline.csv') # To hard to support this case - #def test_mixed_unix_windows_newlines_escaped(**kwargs): + # def test_mixed_unix_windows_newlines_escaped(**kwargs): # check_newlines('unix_newlines_with_windows_inline_escaped.csv') def test_mixed_windows_unix_newlines(**kwargs): @@ -283,160 +300,153 @@ def test_mixed_windows_unix_newlines_escaped(**kwargs): def test_tabs(**kwargs): ds = get_csv_ds('tabs_in_csv.csv') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','z']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],-122) - eq_(feat['y'],48) - eq_(feat['z'],0) + assert len(ds.fields()) == 3 + assert ds.fields(), ['x', 'y' == 'z'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == -122 + assert feat['y'] == 48 + assert feat['z'] == 0 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_separator_pipes(**kwargs): ds = get_csv_ds('pipe_delimiters.csv') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','z']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['z'],'hello') + assert len(ds.fields()) == 3 + assert ds.fields(), ['x', 'y' == 'z'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['z'] == 'hello' desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_separator_semicolon(**kwargs): ds = get_csv_ds('semicolon_delimiters.csv') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','z']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['z'],'hello') + assert len(ds.fields()) == 3 + assert ds.fields(), ['x', 'y' == 'z'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['z'] == 'hello' desc = ds.describe() - 
eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_that_null_and_bool_keywords_are_empty_strings(**kwargs): ds = get_csv_ds('nulls_and_booleans_as_strings.csv') - eq_(len(ds.fields()),4) - eq_(ds.fields(),['x','y','null','boolean']) - eq_(ds.field_types(),['int', 'int', 'str', 'bool']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['null'],'null') - eq_(feat['boolean'],True) - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['null'],'') - eq_(feat['boolean'],False) + assert len(ds.fields()) == 4 + assert ds.fields() == ['x', 'y', 'null', 'boolean'] + assert ds.field_types() == ['int', 'int', 'str', 'bool'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['null'] == 'null' + assert feat['boolean'] == True + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['null'] == '' + assert feat['boolean'] == False desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point - @raises(RuntimeError) def test_that_nonexistant_query_field_throws(**kwargs): - ds = get_csv_ds('lon_lat.csv') - eq_(len(ds.fields()),2) - eq_(ds.fields(),['lon','lat']) - eq_(ds.field_types(),['int','int']) - query = mapnik.Query(ds.envelope()) - for fld in ds.fields(): - query.add_property_name(fld) - # also add an invalid one, triggering throw - query.add_property_name('bogus') - ds.features(query) + with pytest.raises(RuntimeError): + ds = get_csv_ds('lon_lat.csv') + assert len(ds.fields()) == 2 + assert ds.fields() == ['lon', 'lat'] + assert ds.field_types() == ['int', 'int'] + query = mapnik.Query(ds.envelope()) + for fld in ds.fields(): + query.add_property_name(fld) + # also add an invalid one, triggering throw + query.add_property_name('bogus') + ds.features(query) + def test_that_leading_zeros_mean_strings(**kwargs): ds = get_csv_ds('leading_zeros.csv') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','fips']) - eq_(ds.field_types(),['int','int','str']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['fips'],'001') - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['fips'],'003') - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['fips'],'005') + assert len(ds.fields()) == 3 + assert ds.fields() == ['x', 'y', 'fips'] + assert ds.field_types() == ['int', 'int', 'str'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['fips'] == '001' + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['fips'] == '003' + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['fips'] == '005' desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point def test_advanced_geometry_detection(**kwargs): ds = get_csv_ds('point_wkt.csv') - eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point) + assert ds.describe()['geometry_type'] == mapnik.DataGeometryType.Point ds = get_csv_ds('poly_wkt.csv') - eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Polygon) + assert 
ds.describe()['geometry_type'] == mapnik.DataGeometryType.Polygon ds = get_csv_ds('multi_poly_wkt.csv') - eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Polygon) + assert ds.describe()['geometry_type'] == mapnik.DataGeometryType.Polygon ds = get_csv_ds('line_wkt.csv') - eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.LineString) + assert ds.describe()['geometry_type'] == mapnik.DataGeometryType.LineString def test_creation_of_csv_from_in_memory_string(**kwargs): csv_string = ''' wkt,Name "POINT (120.15 48.47)","Winthrop, WA" - ''' # csv plugin will test lines <= 10 chars for being fully blank - ds = mapnik.Datasource(**{"type":"csv","inline":csv_string}) - eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point) - fs = ds.featureset() - feat = fs.next() - eq_(feat['Name'],u"Winthrop, WA") + ''' # csv plugin will test lines <= 10 chars for being fully blank + ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string}) + assert ds.describe()['geometry_type'] == mapnik.DataGeometryType.Point + fs = iter(ds) + feat = next(fs) + assert feat['Name'] == u"Winthrop, WA" def test_creation_of_csv_from_in_memory_string_with_uft8(**kwargs): csv_string = ''' wkt,Name "POINT (120.15 48.47)","Québec" - ''' # csv plugin will test lines <= 10 chars for being fully blank - ds = mapnik.Datasource(**{"type":"csv","inline":csv_string}) - eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point) - fs = ds.featureset() - feat = fs.next() - eq_(feat['Name'],u"Québec") + ''' # csv plugin will test lines <= 10 chars for being fully blank + ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string}) + assert ds.describe()['geometry_type'] == mapnik.DataGeometryType.Point + fs = iter(ds) + feat = next(fs) + assert feat['Name'] == u"Québec" def validate_geojson_datasource(ds): - eq_(len(ds.fields()),1) - eq_(ds.fields(),['type']) - eq_(ds.field_types(),['str']) - fs = ds.all_features() - #eq_(len(fs[0].geometries()),1) - eq_(fs[0].geometry.type(),mapnik.GeometryType.Point) - #eq_(len(fs[1].geometries()),1) - eq_(fs[1].geometry.type(),mapnik.GeometryType.LineString) - #eq_(len(fs[2].geometries()),1) - eq_(fs[2].geometry.type(), mapnik.GeometryType.Polygon) - #eq_(len(fs[3].geometries()),1) # one geometry, two parts - eq_(fs[3].geometry.type(),mapnik.GeometryType.Polygon) - #eq_(len(fs[4].geometries()),4) - eq_(fs[4].geometry.type(),mapnik.GeometryType.MultiPoint) - #eq_(len(fs[5].geometries()),2) - eq_(fs[5].geometry.type(),mapnik.GeometryType.MultiLineString) - #eq_(len(fs[6].geometries()),2) - eq_(fs[6].geometry.type(),mapnik.GeometryType.MultiPolygon) - #eq_(len(fs[7].geometries()),2) - eq_(fs[7].geometry.type(),mapnik.GeometryType.MultiPolygon) + assert len(ds.fields()) == 1 + assert ds.fields() == ['type'] + assert ds.field_types() == ['str'] + fs = list(iter(ds)) + assert fs[0].geometry.type() == mapnik.GeometryType.Point + assert fs[1].geometry.type() == mapnik.GeometryType.LineString + assert fs[2].geometry.type() == mapnik.GeometryType.Polygon + assert fs[3].geometry.type() == mapnik.GeometryType.Polygon + assert fs[4].geometry.type() == mapnik.GeometryType.MultiPoint + assert fs[5].geometry.type() == mapnik.GeometryType.MultiLineString + assert fs[6].geometry.type() == mapnik.GeometryType.MultiPolygon + assert fs[7].geometry.type() == mapnik.GeometryType.MultiPolygon desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Collection) - eq_(desc['name'],'csv') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert 
desc['geometry_type'] == mapnik.DataGeometryType.Collection + assert desc['name'] == 'csv' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' def test_json_field1(**kwargs): ds = get_csv_ds('geojson_double_quote_escape.csv') @@ -452,153 +462,148 @@ def test_json_field3(**kwargs): def test_that_blank_undelimited_rows_are_still_parsed(**kwargs): ds = get_csv_ds('more_headers_than_column_values.csv') - eq_(len(ds.fields()),5) - eq_(ds.fields(),['x','y','one', 'two','three']) - eq_(ds.field_types(),['int','int','str','str','str']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['one'],'') - eq_(feat['two'],'') - eq_(feat['three'],'') + assert len(ds.fields()) == 0 + assert ds.fields() == [] + assert ds.field_types() == [] + fs = list(iter(ds)) + assert len(fs) == 0 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) + assert desc['geometry_type'] == None - @raises(RuntimeError) def test_that_fewer_headers_than_rows_throws(**kwargs): - # this has invalid header # so throw - get_csv_ds('more_column_values_than_headers.csv') + with pytest.raises(RuntimeError): + # this has invalid header # so throw + get_csv_ds('more_column_values_than_headers.csv') def test_that_feature_id_only_incremented_for_valid_rows(**kwargs): ds = mapnik.Datasource(type='csv', - file=os.path.join('../data/csv/warns','feature_id_counting.csv')) - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','id']) - eq_(ds.field_types(),['int','int','int']) - fs = ds.featureset() + file=os.path.join('../data/csv/warns', 'feature_id_counting.csv')) + assert len(ds.fields()) == 3 + assert ds.fields() == ['x', 'y', 'id'] + assert ds.field_types() == ['int', 'int', 'int'] + fs = iter(ds) # first - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['id'],1) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['id'] == 1 # second, should have skipped bogus one - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['id'],2) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['id'] == 2 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(len(ds.all_features()),2) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert len(list(iter(ds))) == 2 def test_dynamically_defining_headers1(**kwargs): ds = mapnik.Datasource(type='csv', - file=os.path.join('../data/csv/fails','needs_headers_two_lines.csv'), + file=os.path.join( + '../data/csv/fails', 'needs_headers_two_lines.csv'), headers='x,y,name') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','name']) - eq_(ds.field_types(),['int','int','str']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['name'],'data_name') + assert len(ds.fields()) == 3 + assert ds.fields() == ['x', 'y', 'name'] + assert ds.field_types() == ['int', 'int', 'str'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['name'] == 'data_name' desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(len(ds.all_features()),2) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert len(list(iter(ds))) == 2 def test_dynamically_defining_headers2(**kwargs): ds = mapnik.Datasource(type='csv', - file=os.path.join('../data/csv/fails','needs_headers_one_line.csv'), + file=os.path.join( + '../data/csv/fails', 'needs_headers_one_line.csv'), headers='x,y,name') -
eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','name']) - eq_(ds.field_types(),['int','int','str']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['name'],'data_name') + assert len(ds.fields()) == 3 + assert ds.fields() == ['x', 'y', 'name'] + assert ds.field_types() == ['int', 'int', 'str'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['name'] == 'data_name' desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(len(ds.all_features()),1) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert len(list(iter(ds))) == 1 def test_dynamically_defining_headers3(**kwargs): ds = mapnik.Datasource(type='csv', - file=os.path.join('../data/csv/fails','needs_headers_one_line_no_newline.csv'), + file=os.path.join( + '../data/csv/fails', 'needs_headers_one_line_no_newline.csv'), headers='x,y,name') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','name']) - eq_(ds.field_types(),['int','int','str']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['x'],0) - eq_(feat['y'],0) - eq_(feat['name'],'data_name') + assert len(ds.fields()) == 3 + assert ds.fields() == ['x', 'y', 'name'] + assert ds.field_types() == ['int', 'int', 'str'] + fs = iter(ds) + feat = next(fs) + assert feat['x'] == 0 + assert feat['y'] == 0 + assert feat['name'] == 'data_name' desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(len(ds.all_features()),1) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert len(list(iter(ds))) == 1 def test_that_64bit_int_fields_work(**kwargs): ds = get_csv_ds('64bit_int.csv') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','bigint']) - eq_(ds.field_types(),['int','int','int']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['bigint'],2147483648) - feat = fs.next() - eq_(feat['bigint'],9223372036854775807) - eq_(feat['bigint'],0x7FFFFFFFFFFFFFFF) + assert len(ds.fields()) == 3 + assert ds.fields() == ['x', 'y', 'bigint'] + assert ds.field_types() == ['int', 'int', 'int'] + fs = iter(ds) + feat = next(fs) + assert feat['bigint'] == 2147483648 + feat = next(fs) + assert feat['bigint'] == 9223372036854775807 + assert feat['bigint'] == 0x7FFFFFFFFFFFFFFF desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(len(ds.all_features()),2) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert len(list(iter(ds))) == 2 def test_various_number_types(**kwargs): ds = get_csv_ds('number_types.csv') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['x','y','floats']) - eq_(ds.field_types(),['int','int','float']) - fs = ds.featureset() - feat = fs.next() - eq_(feat['floats'],.0) - feat = fs.next() - eq_(feat['floats'],+.0) - feat = fs.next() - eq_(feat['floats'],1e-06) - feat = fs.next() - eq_(feat['floats'],-1e-06) - feat = fs.next() - eq_(feat['floats'],0.000001) - feat = fs.next() - eq_(feat['floats'],1.234e+16) - feat = fs.next() - eq_(feat['floats'],1.234e+16) + assert len(ds.fields()) == 3 + assert ds.fields() == ['x', 'y', 'floats'] + assert ds.field_types() == ['int', 'int', 'float'] + fs = iter(ds) + feat = next(fs) + assert feat['floats'] == .0 + feat = next(fs) + assert feat['floats'] == +.0 + feat = next(fs) + assert feat['floats'] == 1e-06 + feat = next(fs) + assert feat['floats'] == -1e-06 + feat = next(fs) + assert feat['floats'] == 0.000001 + feat = next(fs) + assert feat['floats'] == 1.234e+16 + feat = next(fs) + assert feat['floats'] == 1.234e+16
desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(len(ds.all_features()),8) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert len(list(iter(ds))) == 8 def test_manually_supplied_extent(**kwargs): csv_string = ''' wkt,Name ''' - ds = mapnik.Datasource(**{"type":"csv","extent":"-180,-90,180,90","inline":csv_string}) + ds = mapnik.Datasource( + **{"type": "csv", "extent": "-180,-90,180,90", "inline": csv_string}) b = ds.envelope() - eq_(b.minx,-180) - eq_(b.miny,-90) - eq_(b.maxx,180) - eq_(b.maxy,90) + assert b.minx == -180 + assert b.miny == -90 + assert b.maxx == 180 + assert b.maxy == 90 def test_inline_geojson(**kwargs): csv_string = "geojson\n'{\"coordinates\":[-92.22568,38.59553],\"type\":\"Point\"}'" - ds = mapnik.Datasource(**{"type":"csv","inline":csv_string}) - eq_(len(ds.fields()),0) - eq_(ds.fields(),[]) - # FIXME - re-enable after https://github.com/mapnik/mapnik/issues/2319 is fixed - #fs = ds.featureset() - #feat = fs.next() - #eq_(feat.num_geometries(),1) - -if __name__ == "__main__": - setup() - [eval(run)(visual=True) for run in dir() if 'test_' in run] + ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string}) + assert len(ds.fields()) == 0 + assert ds.fields() == [] + fs = iter(ds) + feat = next(fs) + assert feat.geometry.type() == mapnik.GeometryType.Point + assert feat.geometry.to_wkt() == "POINT(-92.22568 38.59553)" diff --git a/test/python_tests/datasource_test.py b/test/python_tests/datasource_test.py index 4ada3dc3c..13be4c6f0 100644 --- a/test/python_tests/datasource_test.py +++ b/test/python_tests/datasource_test.py @@ -1,168 +1,175 @@ -#!/usr/bin/env python - -from nose.tools import eq_, raises -from utilities import execution_path, run_all -import os, mapnik +import os +import sys +import mapnik +import pytest +from .utilities import execution_path from itertools import groupby +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield def test_that_datasources_exist(): if len(mapnik.DatasourceCache.plugin_names()) == 0: - print '***NOTICE*** - no datasource plugins have been loaded' + print('***NOTICE*** - no datasource plugins have been loaded') # adapted from raster_symboliser_test#test_dataraster_query_point -@raises(RuntimeError) -def test_vrt_referring_to_missing_files(): - srs = '+init=epsg:32630' - if 'gdal' in mapnik.DatasourceCache.plugin_names(): - lyr = mapnik.Layer('dataraster') - lyr.datasource = mapnik.Gdal( - file = '../data/raster/missing_raster.vrt', - band = 1, +def test_vrt_referring_to_missing_files(setup): + with pytest.raises(RuntimeError): + srs = 'epsg:32630' + if 'gdal' in mapnik.DatasourceCache.plugin_names(): + lyr = mapnik.Layer('dataraster') + lyr.datasource = mapnik.Gdal( + file='../data/raster/missing_raster.vrt', + band=1, ) - lyr.srs = srs - _map = mapnik.Map(256, 256, srs) - _map.layers.append(lyr) - - # center of extent of raster - x, y = 556113.0,4381428.0 # center of extent of raster - - _map.zoom_all() - - # Fancy stuff to supress output of error - # open 2 fds - null_fds = [os.open(os.devnull, os.O_RDWR) for x in xrange(2)] - # save the current file descriptors to a tuple - save = os.dup(1), os.dup(2) - # put /dev/null fds on 1 and 2 - os.dup2(null_fds[0], 1) - os.dup2(null_fds[1], 2) - - # *** run the function *** - try: - # Should RuntimeError here - _map.query_point(0, x, y).features - finally: - # restore file descriptors so I 
can print the results - os.dup2(save[0], 1) - os.dup2(save[1], 2) - # close the temporary fds - os.close(null_fds[0]) - os.close(null_fds[1]) + lyr.srs = srs + _map = mapnik.Map(256, 256, srs) + _map.layers.append(lyr) + + # center of extent of raster + x, y = 556113.0, 4381428.0 # center of extent of raster + _map.zoom_all() + + # Fancy stuff to supress output of error + # open 2 fds + null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)] + # save the current file descriptors to a tuple + save = os.dup(1), os.dup(2) + # put /dev/null fds on 1 and 2 + os.dup2(null_fds[0], 1) + os.dup2(null_fds[1], 2) + + # *** run the function *** + try: + # Should RuntimeError here + list(_map.query_point(0, x, y)) + finally: + # restore file descriptors so I can print the results + os.dup2(save[0], 1) + os.dup2(save[1], 2) + # close the temporary fds + os.close(null_fds[0]) + os.close(null_fds[1]) def test_field_listing(): if 'shape' in mapnik.DatasourceCache.plugin_names(): ds = mapnik.Shapefile(file='../data/shp/poly.shp') fields = ds.fields() - eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA']) + assert fields, ['AREA', 'EAS_ID' == 'PRFEDEA'] desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Polygon) - eq_(desc['name'],'shape') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') + assert desc['geometry_type'] == mapnik.DataGeometryType.Polygon + assert desc['name'] == 'shape' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' + def test_total_feature_count_shp(): if 'shape' in mapnik.DatasourceCache.plugin_names(): ds = mapnik.Shapefile(file='../data/shp/poly.shp') - features = ds.all_features() - num_feats = len(features) - eq_(num_feats, 10) + features = iter(ds) + num_feats = len(list(features)) + assert num_feats == 10 def test_total_feature_count_json(): if 'ogr' in mapnik.DatasourceCache.plugin_names(): - ds = mapnik.Ogr(file='../data/json/points.geojson',layer_by_index=0) + ds = mapnik.Ogr(file='../data/json/points.geojson', layer_by_index=0) desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(desc['name'],'ogr') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') - features = ds.all_features() - num_feats = len(features) - eq_(num_feats, 5) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert desc['name'] == 'ogr' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' + features = iter(ds) + num_feats = len(list(features)) + assert num_feats == 5 + def test_sqlite_reading(): if 'sqlite' in mapnik.DatasourceCache.plugin_names(): - ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',table_by_index=0) + ds = mapnik.SQLite( + file='../data/sqlite/world.sqlite', + table_by_index=0) desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Polygon) - eq_(desc['name'],'sqlite') - eq_(desc['type'],mapnik.DataType.Vector) - eq_(desc['encoding'],'utf-8') - features = ds.all_features() - num_feats = len(features) - eq_(num_feats, 245) + assert desc['geometry_type'] == mapnik.DataGeometryType.Polygon + assert desc['name'] == 'sqlite' + assert desc['type'] == mapnik.DataType.Vector + assert desc['encoding'] == 'utf-8' + features = iter(ds) + num_feats = len(list(features)) + assert num_feats == 245 + def test_reading_json_from_string(): - json = open('../data/json/points.geojson','r').read() + with open('../data/json/points.geojson', 'r') as f: + json = f.read() if 'ogr' in 
mapnik.DatasourceCache.plugin_names(): - ds = mapnik.Ogr(file=json,layer_by_index=0) - features = ds.all_features() - num_feats = len(features) - eq_(num_feats, 5) + ds = mapnik.Ogr(file=json, layer_by_index=0) + features = iter(ds) + num_feats = len(list(features)) + assert num_feats == 5 + def test_feature_envelope(): if 'shape' in mapnik.DatasourceCache.plugin_names(): ds = mapnik.Shapefile(file='../data/shp/poly.shp') - features = ds.all_features() - for feat in features: + for feat in ds: env = feat.envelope() contains = ds.envelope().contains(env) - eq_(contains, True) + assert contains == True intersects = ds.envelope().contains(env) - eq_(intersects, True) + assert intersects == True + def test_feature_attributes(): if 'shape' in mapnik.DatasourceCache.plugin_names(): ds = mapnik.Shapefile(file='../data/shp/poly.shp') - features = ds.all_features() + features = list(iter(ds)) feat = features[0] - attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266} - eq_(feat.attributes, attrs) - eq_(ds.fields(),['AREA', 'EAS_ID', 'PRFEDEA']) - eq_(ds.field_types(),['float','int','str']) + attrs = {'AREA': 215229.266, 'EAS_ID': 168, 'PRFEDEA': '35043411'} + assert feat.attributes == attrs + assert ds.fields() == ['AREA', 'EAS_ID', 'PRFEDEA'] + assert ds.field_types() == ['float', 'int', 'str'] + def test_ogr_layer_by_sql(): if 'ogr' in mapnik.DatasourceCache.plugin_names(): - ds = mapnik.Ogr(file='../data/shp/poly.shp', layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168') - features = ds.all_features() - num_feats = len(features) - eq_(num_feats, 1) + ds = mapnik.Ogr(file='../data/shp/poly.shp', + layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168') + features = iter(ds) + num_feats = len(list(features)) + assert num_feats == 1 + def test_hit_grid(): def rle_encode(l): """ encode a list of strings with run-length compression """ - return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)] + return ["%d:%s" % (len(list(group)), name) + for name, group in groupby(l)] - m = mapnik.Map(256,256); + m = mapnik.Map(256, 256) try: - mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml'); + mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml') m.zoom_all() join_field = 'NAME' - fg = [] # feature grid + fg = [] # feature grid for y in range(0, 256, 4): for x in range(0, 256, 4): - featureset = m.query_map_point(0,x,y) + featureset = m.query_map_point(0, x, y) added = False - for feature in featureset.features: + for feature in featureset: fg.append(feature[join_field]) added = True if not added: fg.append('') hit_list = '|'.join(rle_encode(fg)) - eq_(hit_list[:16],'730:|2:Greenland') - eq_(hit_list[-12:],'1:Chile|812:') - except RuntimeError, e: + assert hit_list[:16] == '730:|2:Greenland' + assert hit_list[-12:] == '1:Chile|812:' + except RuntimeError as e: # only test datasources that we have installed if not 'Could not create datasource' in str(e): raise RuntimeError(str(e)) - - -if __name__ == '__main__': - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) diff --git a/test/python_tests/datasource_xml_template_test.py b/test/python_tests/datasource_xml_template_test.py index 38a73a3e2..6c5de3587 100644 --- a/test/python_tests/datasource_xml_template_test.py +++ b/test/python_tests/datasource_xml_template_test.py @@ -1,23 +1,28 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - import os -from utilities import execution_path, run_all import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def
setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield -def test_datasource_template_is_working(): - m = mapnik.Map(256,256) - try: - mapnik.load_map(m,'../data/good_maps/datasource.xml') - except RuntimeError, e: - if "Required parameter 'type'" in str(e): - raise RuntimeError(e) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) +def test_datasource_template_is_working(setup): + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/datasource.xml') + for layer in m.layers: + layer_bbox = layer.envelope() + bbox = None + first = True + for feature in layer.datasource: + assert feature.envelope() == feature.geometry.envelope() + assert layer_bbox.contains(feature.envelope()) + if first: + first = False + bbox = feature.envelope() + else: + bbox += feature.envelope() + assert layer_bbox == bbox diff --git a/test/python_tests/extra_map_props_test.py b/test/python_tests/extra_map_props_test.py index 045cddbdb..213b22b36 100644 --- a/test/python_tests/extra_map_props_test.py +++ b/test/python_tests/extra_map_props_test.py @@ -1,54 +1,51 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_ -from utilities import execution_path, run_all -import os, mapnik +import os +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield + -def test_arbitrary_parameters_attached_to_map(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/extra_arbitary_map_parameters.xml') - eq_(len(m.parameters),5) - eq_(m.parameters['key'],'value2') - eq_(m.parameters['key3'],'value3') - eq_(m.parameters['unicode'],u'iván') - eq_(m.parameters['integer'],10) - eq_(m.parameters['decimal'],.999) - m2 = mapnik.Map(256,256) - for k,v in m.parameters: - m2.parameters.append(mapnik.Parameter(k,v)) - eq_(len(m2.parameters),5) - eq_(m2.parameters['key'],'value2') - eq_(m2.parameters['key3'],'value3') - eq_(m2.parameters['unicode'],u'iván') - eq_(m2.parameters['integer'],10) - eq_(m2.parameters['decimal'],.999) +def test_arbitrary_parameters_attached_to_map(setup): + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/extra_arbitary_map_parameters.xml') + assert len(m.parameters) == 5 + assert m.parameters['key'] == 'value2' + assert m.parameters['key3'] == 'value3' + assert m.parameters['unicode'] == u'iván' + assert m.parameters['integer'] == 10 + assert m.parameters['decimal'] == .999 + m2 = mapnik.Map(256, 256) + for k, v in m.parameters.items(): + m2.parameters[k] = v + assert len(m2.parameters) == 5 + assert m2.parameters['key'] == 'value2' + assert m2.parameters['key3'] == 'value3' + assert m2.parameters['unicode'] == u'iván' + assert m2.parameters['integer'] == 10 + assert m2.parameters['decimal'] == .999 map_string = mapnik.save_map_to_string(m) - m3 = mapnik.Map(256,256) - mapnik.load_map_from_string(m3,map_string) - eq_(len(m3.parameters),5) - eq_(m3.parameters['key'],'value2') - eq_(m3.parameters['key3'],'value3') - eq_(m3.parameters['unicode'],u'iván') - eq_(m3.parameters['integer'],10) - eq_(m3.parameters['decimal'],.999) + m3 = mapnik.Map(256, 256) + mapnik.load_map_from_string(m3, map_string) + assert len(m3.parameters) == 5 + assert m3.parameters['key'] == 'value2' + assert m3.parameters['key3'] == 'value3' + 
assert m3.parameters['unicode'] == u'iván' + assert m3.parameters['integer'] == 10 + assert m3.parameters['decimal'] == .999 def test_serializing_arbitrary_parameters(): - m = mapnik.Map(256,256) - m.parameters.append(mapnik.Parameter('width',m.width)) - m.parameters.append(mapnik.Parameter('height',m.height)) - - m2 = mapnik.Map(1,1) - mapnik.load_map_from_string(m2,mapnik.save_map_to_string(m)) - eq_(m2.parameters['width'],m.width) - eq_(m2.parameters['height'],m.height) + m = mapnik.Map(256, 256) + m.parameters['width'] = m.width + m.parameters['height'] = m.height -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + m2 = mapnik.Map(1, 1) + mapnik.load_map_from_string(m2, mapnik.save_map_to_string(m)) + assert m2.parameters['width'] == m.width + assert m2.parameters['height'] == m.height diff --git a/test/python_tests/feature_id_test.py b/test/python_tests/feature_id_test.py index 66c20ccf4..20e8ad9eb 100644 --- a/test/python_tests/feature_id_test.py +++ b/test/python_tests/feature_id_test.py @@ -1,66 +1,67 @@ -#!/usr/bin/env python +import mapnik +import os +import pytest +try: + import itertools.izip as zip +except ImportError: + pass -from nose.tools import eq_ -from utilities import execution_path, run_all -import os, mapnik -import itertools +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield -def compare_shape_between_mapnik_and_ogr(shapefile,query=None): +def compare_shape_between_mapnik_and_ogr(shapefile, query=None): plugins = mapnik.DatasourceCache.plugin_names() if 'shape' in plugins and 'ogr' in plugins: - ds1 = mapnik.Ogr(file=shapefile,layer_by_index=0) + ds1 = mapnik.Ogr(file=shapefile, layer_by_index=0) ds2 = mapnik.Shapefile(file=shapefile) if query: fs1 = ds1.features(query) fs2 = ds2.features(query) else: - fs1 = ds1.featureset() - fs2 = ds2.featureset() - count = 0; - for feat1,feat2 in itertools.izip(fs1,fs2): + fs1 = iter(ds1) + fs2 = iter(ds2) + count = 0 + for feat1, feat2 in zip(fs1, fs2): count += 1 - eq_(feat1.id(),feat2.id(), - '%s : ogr feature id %s "%s" does not equal shapefile feature id %s "%s"' - % (count,feat1.id(),str(feat1.attributes), feat2.id(),str(feat2.attributes))) + assert feat1.id() == feat2.id(), '%s : ogr feature id %s "%s" does not equal shapefile feature id %s "%s"' % (count, feat1.id(), str(feat1.attributes), feat2.id(), str(feat2.attributes)) return True -def test_shapefile_line_featureset_id(): +def test_shapefile_line_featureset_id(setup): compare_shape_between_mapnik_and_ogr('../data/shp/polylines.shp') + def test_shapefile_polygon_featureset_id(): compare_shape_between_mapnik_and_ogr('../data/shp/poly.shp') + def test_shapefile_polygon_feature_query_id(): bbox = (15523428.2632, 4110477.6323, -11218494.8310, 7495720.7404) query = mapnik.Query(mapnik.Box2d(*bbox)) if 'ogr' in mapnik.DatasourceCache.plugin_names(): - ds = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0) + ds = mapnik.Ogr(file='../data/shp/world_merc.shp', layer_by_index=0) for fld in ds.fields(): query.add_property_name(fld) - compare_shape_between_mapnik_and_ogr('../data/shp/world_merc.shp',query) + compare_shape_between_mapnik_and_ogr( + '../data/shp/world_merc.shp', query) + def test_feature_hit_count(): - pass - #raise Todo("need to optimize multigeom bbox handling in shapeindex: https://github.com/mapnik/mapnik/issues/783") # 
results in different results between shp and ogr! #bbox = (-14284551.8434, 2074195.1992, -7474929.8687, 8140237.7628) - #bbox = (1113194.91,4512803.085,2226389.82,6739192.905) - #query = mapnik.Query(mapnik.Box2d(*bbox)) - #if 'ogr' in mapnik.DatasourceCache.plugin_names(): - # ds1 = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0) - # for fld in ds1.fields(): - # query.add_property_name(fld) - # ds2 = mapnik.Shapefile(file='../data/shp/world_merc.shp') - # count1 = len(ds1.features(query).features) - # count2 = len(ds2.features(query).features) - # eq_(count1,count2,"Feature count differs between OGR driver (%s features) and Shapefile Driver (%s features) when querying the same bbox" % (count1,count2)) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + bbox = (1113194.91,4512803.085,2226389.82,6739192.905) + query = mapnik.Query(mapnik.Box2d(*bbox)) + if 'ogr' in mapnik.DatasourceCache.plugin_names(): + ds1 = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0) + for fld in ds1.fields(): + query.add_property_name(fld) + ds2 = mapnik.Shapefile(file='../data/shp/world_merc.shp') + count1 = len(list(ds1.features(query))) + count2 = len(list(ds2.features(query))) + assert count1 < count2 # expected 17 and 20 diff --git a/test/python_tests/feature_test.py b/test/python_tests/feature_test.py index 5574cc708..d4f8afc61 100644 --- a/test/python_tests/feature_test.py +++ b/test/python_tests/feature_test.py @@ -1,110 +1,115 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_,raises -from utilities import run_all - -import mapnik from binascii import unhexlify +import mapnik +import pytest def test_default_constructor(): - f = mapnik.Feature(mapnik.Context(),1) - eq_(f is not None,True) + f = mapnik.Feature(mapnik.Context(), 1) + assert f is not None + def test_feature_geo_interface(): ctx = mapnik.Context() - feat = mapnik.Feature(ctx,1) + feat = mapnik.Feature(ctx, 1) feat.geometry = mapnik.Geometry.from_wkt('Point (0 0)') - eq_(feat.__geo_interface__['geometry'],{u'type': u'Point', u'coordinates': [0, 0]}) + assert feat.__geo_interface__['geometry'] == {u'type': u'Point', u'coordinates': [0, 0]} + def test_python_extended_constructor(): context = mapnik.Context() context.push('foo') context.push('foo') - f = mapnik.Feature(context,1) + f = mapnik.Feature(context, 1) wkt = 'POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))' f.geometry = mapnik.Geometry.from_wkt(wkt) f['foo'] = 'bar' - eq_(f['foo'], 'bar') - eq_(f.envelope(),mapnik.Box2d(10.0,10.0,45.0,45.0)) + assert f['foo'] == 'bar' + assert f.envelope(), mapnik.Box2d(10.0, 10.0, 45.0 == 45.0) # reset f['foo'] = u"avión" - eq_(f['foo'], u"avión") + assert f['foo'] == u"avión" f['foo'] = 1.4 - eq_(f['foo'], 1.4) + assert f['foo'] == 1.4 f['foo'] = True - eq_(f['foo'], True) + assert f['foo'] == True + def test_add_geom_wkb(): -# POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10)) + # POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10)) wkb = '010300000001000000050000000000000000003e4000000000000024400000000000002440000000000000344000000000000034400000000000004440000000000000444000000000000044400000000000003e400000000000002440' geometry = mapnik.Geometry.from_wkb(unhexlify(wkb)) - eq_(geometry.is_valid(), True) - eq_(geometry.is_simple(), True) - eq_(geometry.envelope(), mapnik.Box2d(10.0,10.0,40.0,40.0)) + if hasattr(geometry, 'is_valid'): + # Those are only available when python-mapnik has been built with + # boost >= 1.56. 
+ assert geometry.is_valid() == True + assert geometry.is_simple() == True + assert geometry.envelope() == mapnik.Box2d(10.0, 10.0, 40.0, 40.0) geometry.correct() - # valid after calling correct - eq_(geometry.is_valid(), True) + if hasattr(geometry, 'is_valid'): + # valid after calling correct + assert geometry.is_valid() == True + def test_feature_expression_evaluation(): context = mapnik.Context() context.push('name') - f = mapnik.Feature(context,1) + f = mapnik.Feature(context, 1) f['name'] = 'a' - eq_(f['name'],u'a') + assert f['name'] == u'a' expr = mapnik.Expression("[name]='a'") evaluated = expr.evaluate(f) - eq_(evaluated,True) + assert evaluated == True num_attributes = len(f) - eq_(num_attributes,1) - eq_(f.id(),1) + assert num_attributes == 1 + assert f.id() == 1 # https://github.com/mapnik/mapnik/issues/933 + + def test_feature_expression_evaluation_missing_attr(): context = mapnik.Context() context.push('name') - f = mapnik.Feature(context,1) + f = mapnik.Feature(context, 1) f['name'] = u'a' - eq_(f['name'],u'a') + assert f['name'] == u'a' expr = mapnik.Expression("[fielddoesnotexist]='a'") - eq_(f.has_key('fielddoesnotexist'),False) + assert not 'fielddoesnotexist' in f try: expr.evaluate(f) - except Exception, e: - eq_("Key does not exist" in str(e),True) + except Exception as e: + assert "Key does not exist" in str(e) num_attributes = len(f) - eq_(num_attributes,1) - eq_(f.id(),1) + assert num_attributes == 1 + assert f.id() == 1 # https://github.com/mapnik/mapnik/issues/934 + + def test_feature_expression_evaluation_attr_with_spaces(): context = mapnik.Context() context.push('name with space') - f = mapnik.Feature(context,1) + f = mapnik.Feature(context, 1) f['name with space'] = u'a' - eq_(f['name with space'],u'a') + assert f['name with space'] == u'a' expr = mapnik.Expression("[name with space]='a'") - eq_(str(expr),"([name with space]='a')") - eq_(expr.evaluate(f),True) + assert str(expr) == "([name with space]='a')" + assert expr.evaluate(f) == True # https://github.com/mapnik/mapnik/issues/2390 -@raises(RuntimeError) + def test_feature_from_geojson(): - ctx = mapnik.Context() - inline_string = """ - { - "geometry" : { - "coordinates" : [ 0,0 ] - "type" : "Point" - }, - "type" : "Feature", - "properties" : { - "this":"that" - "known":"nope because missing comma" - } - } - """ - mapnik.Feature.from_geojson(inline_string,ctx) - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + with pytest.raises(RuntimeError): + ctx = mapnik.Context() + inline_string = """ + { + "geometry" : { + "coordinates" : [ 0,0 ] + "type" : "Point" + }, + "type" : "Feature", + "properties" : { + "this":"that" + "known":"nope because missing comma" + } + } + """ + mapnik.Feature.from_geojson(inline_string, ctx) diff --git a/test/python_tests/filter_test.py b/test/python_tests/filter_test.py index 34845cea1..641d6950f 100644 --- a/test/python_tests/filter_test.py +++ b/test/python_tests/filter_test.py @@ -1,11 +1,7 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_,raises -from utilities import run_all import mapnik +import pytest -if hasattr(mapnik,'Expression'): +if hasattr(mapnik, 'Expression'): mapnik.Filter = mapnik.Expression map_ = ''' @@ -50,9 +46,10 @@ ''' + def test_filter_init(): - m = mapnik.Map(1,1) - mapnik.load_map_from_string(m,map_) + m = mapnik.Map(1, 1) + mapnik.load_map_from_string(m, map_) filters = [] filters.append(mapnik.Filter("([region]>=0) and ([region]<=50)"))
filters.append(mapnik.Filter("(([region]>=0) and ([region]<=50))")) @@ -86,366 +83,390 @@ def test_filter_init(): first = filters[0] for f in filters: - eq_(str(first),str(f)) + assert str(first) == str(f) s = m.find_style('s2') - eq_(s.filter_mode,mapnik.filter_mode.FIRST) + assert s.filter_mode == mapnik.filter_mode.FIRST def test_geometry_type_eval(): # clashing field called 'mapnik::geometry' context2 = mapnik.Context() context2.push('mapnik::geometry_type') - f = mapnik.Feature(context2,0) + f = mapnik.Feature(context2, 0) f["mapnik::geometry_type"] = 'sneaky' expr = mapnik.Expression("[mapnik::geometry_type]") - eq_(expr.evaluate(f),0) + assert expr.evaluate(f) == 0 expr = mapnik.Expression("[mapnik::geometry_type]") context = mapnik.Context() # no geometry - f = mapnik.Feature(context,0) - eq_(expr.evaluate(f),0) - eq_(mapnik.Expression("[mapnik::geometry_type]=0").evaluate(f),True) + f = mapnik.Feature(context, 0) + assert expr.evaluate(f) == 0 + assert mapnik.Expression("[mapnik::geometry_type]=0").evaluate(f) # POINT = 1 - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f.geometry = mapnik.Geometry.from_wkt('POINT(10 40)') - eq_(expr.evaluate(f),1) - eq_(mapnik.Expression("[mapnik::geometry_type]=point").evaluate(f),True) + assert expr.evaluate(f) == 1 + assert mapnik.Expression("[mapnik::geometry_type]=point").evaluate(f) # LINESTRING = 2 - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f.geometry = mapnik.Geometry.from_wkt('LINESTRING (30 10, 10 30, 40 40)') - eq_(expr.evaluate(f),2) - eq_(mapnik.Expression("[mapnik::geometry_type] = linestring").evaluate(f),True) + assert expr.evaluate(f) == 2 + assert mapnik.Expression("[mapnik::geometry_type] = linestring").evaluate(f) # POLYGON = 3 - f = mapnik.Feature(context,0) - f.geometry = mapnik.Geometry.from_wkt('POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))') - eq_(expr.evaluate(f),3) - eq_(mapnik.Expression("[mapnik::geometry_type] = polygon").evaluate(f),True) + f = mapnik.Feature(context, 0) + f.geometry = mapnik.Geometry.from_wkt( + 'POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))') + assert expr.evaluate(f) == 3 + assert mapnik.Expression("[mapnik::geometry_type] = polygon").evaluate(f) # COLLECTION = 4 - f = mapnik.Feature(context,0) - geom = mapnik.Geometry.from_wkt('GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))') - f.geometry = geom; - eq_(expr.evaluate(f),4) - eq_(mapnik.Expression("[mapnik::geometry_type] = collection").evaluate(f),True) + f = mapnik.Feature(context, 0) + geom = mapnik.Geometry.from_wkt( + 'GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))') + f.geometry = geom + assert expr.evaluate(f) == 4 + assert mapnik.Expression("[mapnik::geometry_type] = collection").evaluate(f) + def test_regex_match(): context = mapnik.Context() context.push('name') - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["name"] = 'test' expr = mapnik.Expression("[name].match('test')") - eq_(expr.evaluate(f),True) # 1 == True + assert expr.evaluate(f) # 1 == True + def test_unicode_regex_match(): context = mapnik.Context() context.push('name') - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["name"] = 'Québec' expr = mapnik.Expression("[name].match('Québec')") - eq_(expr.evaluate(f),True) # 1 == True + assert expr.evaluate(f) # 1 == True + def test_regex_replace(): context = mapnik.Context() context.push('name') - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["name"] = 'test' - expr = 
mapnik.Expression("[name].replace('(\B)|( )','$1 ')") - eq_(expr.evaluate(f),'t e s t') + expr = mapnik.Expression("[name].replace('(\\B)|( )','$1 ')") + assert expr.evaluate(f) == 't e s t' + def test_unicode_regex_replace_to_str(): - expr = mapnik.Expression("[name].replace('(\B)|( )','$1 ')") - eq_(str(expr),"[name].replace('(\B)|( )','$1 ')") + expr = mapnik.Expression("[name].replace('(\\B)|( )','$1 ')") + assert str(expr), "[name].replace('(\\B)|( )' == '$1 ')" + def test_unicode_regex_replace(): context = mapnik.Context() context.push('name') - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["name"] = 'Québec' - expr = mapnik.Expression("[name].replace('(\B)|( )','$1 ')") + expr = mapnik.Expression("[name].replace('(\\B)|( )','$1 ')") # will fail if -DBOOST_REGEX_HAS_ICU is not defined - eq_(expr.evaluate(f), u'Q u é b e c') + assert expr.evaluate(f) == u'Q u é b e c' + def test_float_precision(): context = mapnik.Context() context.push('num') - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["num1"] = 1.0000 f["num2"] = 1.0001 - eq_(f["num1"],1.0000) - eq_(f["num2"],1.0001) + assert f["num1"] == 1.0000 + assert f["num2"] == 1.0001 expr = mapnik.Expression("[num1] = 1.0000") - eq_(expr.evaluate(f),True) + assert expr.evaluate(f) expr = mapnik.Expression("[num1].match('1')") - eq_(expr.evaluate(f),True) + assert expr.evaluate(f) expr = mapnik.Expression("[num2] = 1.0001") - eq_(expr.evaluate(f),True) + assert expr.evaluate(f) expr = mapnik.Expression("[num2].match('1.0001')") - eq_(expr.evaluate(f),True) + assert expr.evaluate(f) + def test_string_matching_on_precision(): context = mapnik.Context() context.push('num') - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["num"] = "1.0000" - eq_(f["num"],"1.0000") + assert f["num"] == "1.0000" expr = mapnik.Expression("[num].match('.*(^0|00)$')") - eq_(expr.evaluate(f),True) + assert expr.evaluate(f) + def test_creation_of_null_value(): context = mapnik.Context() context.push('nv') - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["nv"] = None - eq_(f["nv"],None) - eq_(f["nv"] is None,True) + assert f["nv"] == None + assert f["nv"] is None # test boolean f["nv"] = 0 - eq_(f["nv"],0) - eq_(f["nv"] is not None,True) + assert f["nv"] == 0 + assert f["nv"] is not None + def test_creation_of_bool(): context = mapnik.Context() context.push('bool') - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["bool"] = True - eq_(f["bool"],True) + assert f["bool"] # TODO - will become int of 1 do to built in boost python conversion # https://github.com/mapnik/mapnik/issues/1873 - eq_(isinstance(f["bool"],bool) or isinstance(f["bool"],long),True) + assert isinstance(f["bool"], bool) or isinstance(f["bool"], int) f["bool"] = False - eq_(f["bool"],False) - eq_(isinstance(f["bool"],bool) or isinstance(f["bool"],long),True) + assert f["bool"] == False + assert isinstance(f["bool"], bool) or isinstance(f["bool"], int) # test NoneType f["bool"] = None - eq_(f["bool"],None) - eq_(isinstance(f["bool"],bool) or isinstance(f["bool"],long),False) + assert f["bool"] == None + assert not isinstance(f["bool"], bool) or isinstance(f["bool"], int) # test integer f["bool"] = 0 - eq_(f["bool"],0) + assert f["bool"] == 0 # https://github.com/mapnik/mapnik/issues/1873 # ugh, boost_python's built into converter does not work right - #eq_(isinstance(f["bool"],bool),False) + # assert isinstance(f["bool"],bool) == False null_equality = [ - ['hello',False,unicode], - [u'',False,unicode], - 
[0,False,long], - [123,False,long], - [0.0,False,float], - [123.123,False,float], - [.1,False,float], - [False,False,long], # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873 - [True,False,long], # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873 - [None,True,None], - [2147483648,False,long], - [922337203685477580,False,long] + ['hello', False, str], + [u'', False, str], + [0, False, int], + [123, False, int], + [0.0, False, float], + [123.123, False, float], + [.1, False, float], + # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873 + [False, False, int], + # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873 + [True, False, int], + [None, True, None], + [2147483648, False, int], + [922337203685477580, False, int] ] + def test_expressions_with_null_equality(): for eq in null_equality: context = mapnik.Context() - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["prop"] = eq[0] - eq_(f["prop"],eq[0]) + assert f["prop"] == eq[0] if eq[0] is None: - eq_(f["prop"] is None, True) + assert f["prop"] is None else: - eq_(isinstance(f['prop'],eq[2]),True,'%s is not an instance of %s' % (f['prop'],eq[2])) + assert isinstance(f['prop'], eq[2]), '%s is not an instance of %s' % (f['prop'], eq[2]) expr = mapnik.Expression("[prop] = null") - eq_(expr.evaluate(f),eq[1]) + assert expr.evaluate(f) == eq[1] expr = mapnik.Expression("[prop] is null") - eq_(expr.evaluate(f),eq[1]) + assert expr.evaluate(f) == eq[1] + def test_expressions_with_null_equality2(): for eq in null_equality: context = mapnik.Context() - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["prop"] = eq[0] - eq_(f["prop"],eq[0]) + assert f["prop"] == eq[0] if eq[0] is None: - eq_(f["prop"] is None, True) + assert f["prop"] is None else: - eq_(isinstance(f['prop'],eq[2]),True,'%s is not an instance of %s' % (f['prop'],eq[2])) + assert isinstance(f['prop'], eq[2]), '%s is not an instance of %s' % (f['prop'], eq[2]) # TODO - support `is not` syntax: # https://github.com/mapnik/mapnik/issues/796 expr = mapnik.Expression("not [prop] is null") - eq_(expr.evaluate(f),not eq[1]) + assert not expr.evaluate(f) == eq[1] # https://github.com/mapnik/mapnik/issues/1642 expr = mapnik.Expression("[prop] != null") - eq_(expr.evaluate(f),not eq[1]) + assert not expr.evaluate(f) == eq[1] truthyness = [ - [u'hello',True,unicode], - [u'',False,unicode], - [0,False,long], - [123,True,long], - [0.0,False,float], - [123.123,True,float], - [.1,True,float], - [False,False,long], # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873 - [True,True,long], # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873 - [None,False,None], - [2147483648,True,long], - [922337203685477580,True,long] + [u'hello', True, str], + [u'', False, str], + [0, False, int], + [123, True, int], + [0.0, False, float], + [123.123, True, float], + [.1, True, float], + # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873 + [False, False, int], + # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873 + [True, True, int], + [None, False, None], + [2147483648, True, int], + [922337203685477580, True, int] ] + def test_expressions_for_thruthyness(): context = mapnik.Context() for eq in truthyness: - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["prop"] = eq[0] - eq_(f["prop"],eq[0]) + assert f["prop"] == eq[0] if eq[0] is None: - eq_(f["prop"] is None, True) + assert f["prop"] is None 
else: - eq_(isinstance(f['prop'],eq[2]),True,'%s is not an instance of %s' % (f['prop'],eq[2])) + assert isinstance(f['prop'], eq[2]), '%s is not an instance of %s' % (f['prop'], eq[2]) expr = mapnik.Expression("[prop]") - eq_(expr.to_bool(f),eq[1]) + assert expr.to_bool(f) == eq[1] expr = mapnik.Expression("not [prop]") - eq_(expr.to_bool(f),not eq[1]) + assert not expr.to_bool(f) == eq[1] expr = mapnik.Expression("! [prop]") - eq_(expr.to_bool(f),not eq[1]) + assert not expr.to_bool(f) == eq[1] # also test if feature does not have property at all - f2 = mapnik.Feature(context,1) + f2 = mapnik.Feature(context, 1) # no property existing will return value_null since # https://github.com/mapnik/mapnik/commit/562fada9d0f680f59b2d9f396c95320a0d753479#include/mapnik/feature.hpp - eq_(f2["prop"] is None,True) + assert f2["prop"] is None expr = mapnik.Expression("[prop]") - eq_(expr.evaluate(f2),None) - eq_(expr.to_bool(f2),False) + assert expr.evaluate(f2) == None + assert expr.to_bool(f2) == False # https://github.com/mapnik/mapnik/issues/1859 + + def test_if_null_and_empty_string_are_equal(): context = mapnik.Context() - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["empty"] = u"" f["null"] = None # ensure base assumptions are good - eq_(mapnik.Expression("[empty] = ''").to_bool(f),True) - eq_(mapnik.Expression("[null] = null").to_bool(f),True) - eq_(mapnik.Expression("[empty] != ''").to_bool(f),False) - eq_(mapnik.Expression("[null] != null").to_bool(f),False) + assert mapnik.Expression("[empty] = ''").to_bool(f) + assert mapnik.Expression("[null] = null").to_bool(f) + assert not mapnik.Expression("[empty] != ''").to_bool(f) + assert not mapnik.Expression("[null] != null").to_bool(f) # now test expected behavior - eq_(mapnik.Expression("[null] = ''").to_bool(f),False) - eq_(mapnik.Expression("[empty] = null").to_bool(f),False) - eq_(mapnik.Expression("[empty] != null").to_bool(f),True) + assert not mapnik.Expression("[null] = ''").to_bool(f) + assert not mapnik.Expression("[empty] = null").to_bool(f) + assert mapnik.Expression("[empty] != null").to_bool(f) # this one is the back compatibility shim - eq_(mapnik.Expression("[null] != ''").to_bool(f),False) + assert not mapnik.Expression("[null] != ''").to_bool(f) + def test_filtering_nulls_and_empty_strings(): context = mapnik.Context() - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["prop"] = u"hello" - eq_(f["prop"],u"hello") - eq_(mapnik.Expression("[prop]").to_bool(f),True) - eq_(mapnik.Expression("! [prop]").to_bool(f),False) - eq_(mapnik.Expression("[prop] != null").to_bool(f),True) - eq_(mapnik.Expression("[prop] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop] != null and [prop] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop] != null or [prop] != ''").to_bool(f),True) + assert f["prop"] == u"hello" + assert mapnik.Expression("[prop]").to_bool(f) + assert not mapnik.Expression("! [prop]").to_bool(f) + assert mapnik.Expression("[prop] != null").to_bool(f) + assert mapnik.Expression("[prop] != ''").to_bool(f) + assert mapnik.Expression("[prop] != null and [prop] != ''").to_bool(f) + assert mapnik.Expression("[prop] != null or [prop] != ''").to_bool(f) f["prop2"] = u"" - eq_(f["prop2"],u"") - eq_(mapnik.Expression("[prop2]").to_bool(f),False) - eq_(mapnik.Expression("! 
[prop2]").to_bool(f),True) - eq_(mapnik.Expression("[prop2] != null").to_bool(f),True) - eq_(mapnik.Expression("[prop2] != ''").to_bool(f),False) - eq_(mapnik.Expression("[prop2] = ''").to_bool(f),True) - eq_(mapnik.Expression("[prop2] != null or [prop2] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop2] != null and [prop2] != ''").to_bool(f),False) + assert f["prop2"] == u"" + assert not mapnik.Expression("[prop2]").to_bool(f) + assert mapnik.Expression("! [prop2]").to_bool(f) + assert mapnik.Expression("[prop2] != null").to_bool(f) + assert not mapnik.Expression("[prop2] != ''").to_bool(f) + assert mapnik.Expression("[prop2] = ''").to_bool(f) + assert mapnik.Expression("[prop2] != null or [prop2] != ''").to_bool(f) + assert not mapnik.Expression("[prop2] != null and [prop2] != ''").to_bool(f) f["prop3"] = None - eq_(f["prop3"],None) - eq_(mapnik.Expression("[prop3]").to_bool(f),False) - eq_(mapnik.Expression("! [prop3]").to_bool(f),True) - eq_(mapnik.Expression("[prop3] != null").to_bool(f),False) - eq_(mapnik.Expression("[prop3] = null").to_bool(f),True) + assert f["prop3"] == None + assert not mapnik.Expression("[prop3]").to_bool(f) + assert mapnik.Expression("! [prop3]").to_bool(f) + assert not mapnik.Expression("[prop3] != null").to_bool(f) + assert mapnik.Expression("[prop3] = null").to_bool(f) # https://github.com/mapnik/mapnik/issues/1859 - #eq_(mapnik.Expression("[prop3] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop3] != ''").to_bool(f),False) + #assert mapnik.Expression("[prop3] != ''").to_bool(f) == True + assert not mapnik.Expression("[prop3] != ''").to_bool(f) - eq_(mapnik.Expression("[prop3] = ''").to_bool(f),False) + assert not mapnik.Expression("[prop3] = ''").to_bool(f) # https://github.com/mapnik/mapnik/issues/1859 - #eq_(mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f),False) + #assert mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f) == True + assert not mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f) - eq_(mapnik.Expression("[prop3] != null and [prop3] != ''").to_bool(f),False) + assert not mapnik.Expression("[prop3] != null and [prop3] != ''").to_bool(f) # attr not existing should behave the same as prop3 - eq_(mapnik.Expression("[prop4]").to_bool(f),False) - eq_(mapnik.Expression("! [prop4]").to_bool(f),True) - eq_(mapnik.Expression("[prop4] != null").to_bool(f),False) - eq_(mapnik.Expression("[prop4] = null").to_bool(f),True) + assert not mapnik.Expression("[prop4]").to_bool(f) + assert mapnik.Expression("! 
[prop4]").to_bool(f) + assert not mapnik.Expression("[prop4] != null").to_bool(f) + assert mapnik.Expression("[prop4] = null").to_bool(f) # https://github.com/mapnik/mapnik/issues/1859 - ##eq_(mapnik.Expression("[prop4] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop4] != ''").to_bool(f),False) + ##assert mapnik.Expression("[prop4] != ''").to_bool(f) == True + assert not mapnik.Expression("[prop4] != ''").to_bool(f) - eq_(mapnik.Expression("[prop4] = ''").to_bool(f),False) + assert not mapnik.Expression("[prop4] = ''").to_bool(f) # https://github.com/mapnik/mapnik/issues/1859 - ##eq_(mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f),False) + ##assert mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f) == True + assert not mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f) - eq_(mapnik.Expression("[prop4] != null and [prop4] != ''").to_bool(f),False) + assert not mapnik.Expression("[prop4] != null and [prop4] != ''").to_bool(f) f["prop5"] = False - eq_(f["prop5"],False) - eq_(mapnik.Expression("[prop5]").to_bool(f),False) - eq_(mapnik.Expression("! [prop5]").to_bool(f),True) - eq_(mapnik.Expression("[prop5] != null").to_bool(f),True) - eq_(mapnik.Expression("[prop5] = null").to_bool(f),False) - eq_(mapnik.Expression("[prop5] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop5] = ''").to_bool(f),False) - eq_(mapnik.Expression("[prop5] != null or [prop5] != ''").to_bool(f),True) - eq_(mapnik.Expression("[prop5] != null and [prop5] != ''").to_bool(f),True) + assert f["prop5"] == False + assert not mapnik.Expression("[prop5]").to_bool(f) + assert mapnik.Expression("! [prop5]").to_bool(f) + assert mapnik.Expression("[prop5] != null").to_bool(f) + assert not mapnik.Expression("[prop5] = null").to_bool(f) + assert mapnik.Expression("[prop5] != ''").to_bool(f) + assert not mapnik.Expression("[prop5] = ''").to_bool(f) + assert mapnik.Expression("[prop5] != null or [prop5] != ''").to_bool(f) + assert mapnik.Expression("[prop5] != null and [prop5] != ''").to_bool(f) # note, we need to do [prop5] != 0 here instead of false due to this bug: # https://github.com/mapnik/mapnik/issues/1873 - eq_(mapnik.Expression("[prop5] != null and [prop5] != '' and [prop5] != 0").to_bool(f),False) + assert not mapnik.Expression("[prop5] != null and [prop5] != '' and [prop5] != 0").to_bool(f) # https://github.com/mapnik/mapnik/issues/1872 + + def test_falseyness_comparision(): context = mapnik.Context() - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["prop"] = 0 - eq_(mapnik.Expression("[prop]").to_bool(f),False) - eq_(mapnik.Expression("[prop] = false").to_bool(f),True) - eq_(mapnik.Expression("not [prop] != false").to_bool(f),True) - eq_(mapnik.Expression("not [prop] = true").to_bool(f),True) - eq_(mapnik.Expression("[prop] = true").to_bool(f),False) - eq_(mapnik.Expression("[prop] != true").to_bool(f),True) - -# https://github.com/mapnik/mapnik/issues/1806, fixed by https://github.com/mapnik/mapnik/issues/1872 + assert not mapnik.Expression("[prop]").to_bool(f) + assert mapnik.Expression("[prop] = false").to_bool(f) + assert mapnik.Expression("not [prop] != false").to_bool(f) + assert mapnik.Expression("not [prop] = true").to_bool(f) + assert not mapnik.Expression("[prop] = true").to_bool(f) + assert mapnik.Expression("[prop] != true").to_bool(f) + +# https://github.com/mapnik/mapnik/issues/1806, fixed by +# https://github.com/mapnik/mapnik/issues/1872 + + def 
test_truthyness_comparision(): context = mapnik.Context() - f = mapnik.Feature(context,0) + f = mapnik.Feature(context, 0) f["prop"] = 1 - eq_(mapnik.Expression("[prop]").to_bool(f),True) - eq_(mapnik.Expression("[prop] = false").to_bool(f),False) - eq_(mapnik.Expression("not [prop] != false").to_bool(f),False) - eq_(mapnik.Expression("not [prop] = true").to_bool(f),False) - eq_(mapnik.Expression("[prop] = true").to_bool(f),True) - eq_(mapnik.Expression("[prop] != true").to_bool(f),False) + assert mapnik.Expression("[prop]").to_bool(f) == True + assert mapnik.Expression("[prop] = false").to_bool(f) == False + assert mapnik.Expression("not [prop] != false").to_bool(f) == False + assert mapnik.Expression("not [prop] = true").to_bool(f) == False + assert mapnik.Expression("[prop] = true").to_bool(f) == True + assert mapnik.Expression("[prop] != true").to_bool(f) == False + def test_division_by_zero(): expr = mapnik.Expression('[a]/[b]') c = mapnik.Context() c.push('a') c.push('b') - f = mapnik.Feature(c,0); + f = mapnik.Feature(c, 0) f['a'] = 1 f['b'] = 0 - eq_(expr.evaluate(f),None) - -@raises(RuntimeError) -def test_invalid_syntax1(): - mapnik.Expression('abs()') + assert expr.evaluate(f) == None -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) +def test_invalid_syntax1(): + with pytest.raises(RuntimeError): + mapnik.Expression('abs()') diff --git a/test/python_tests/fontset_test.py b/test/python_tests/fontset_test.py index ee8fd7d77..72915e67d 100644 --- a/test/python_tests/fontset_test.py +++ b/test/python_tests/fontset_test.py @@ -1,41 +1,40 @@ -#!/usr/bin/env python +import os +import mapnik +import pytest -from nose.tools import eq_ -from utilities import execution_path, run_all -import os, mapnik +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield -def test_loading_fontset_from_map(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/fontset.xml',True) + +def test_loading_fontset_from_map(setup): + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/fontset.xml', True) fs = m.find_fontset('book-fonts') - eq_(len(fs.names),2) - eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique']) + assert len(fs.names) == 2 + assert list(fs.names) == ['DejaVu Sans Book', 'DejaVu Sans Oblique'] # def test_loading_fontset_from_python(): # m = mapnik.Map(256,256) # fset = mapnik.FontSet('foo') # fset.add_face_name('Comic Sans') # fset.add_face_name('Papyrus') -# eq_(fset.name,'foo') +# assert fset.name == 'foo' # fset.name = 'my-set' -# eq_(fset.name,'my-set') +# assert fset.name == 'my-set' # m.append_fontset('my-set', fset) # sty = mapnik.Style() # rule = mapnik.Rule() # tsym = mapnik.TextSymbolizer() -# eq_(tsym.fontset,None) +# assert tsym.fontset == None # tsym.fontset = fset # rule.symbols.append(tsym) # sty.rules.append(rule) # m.append_style('Style',sty) # serialized_map = mapnik.save_map_to_string(m) -# eq_('fontset-name="my-set"' in serialized_map,True) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) +# assert 'fontset-name="my-set"' in serialized_map == True diff --git a/test/python_tests/geojson_plugin_test.py b/test/python_tests/geojson_plugin_test.py index ef7c74ab7..738cd5523 100644 --- a/test/python_tests/geojson_plugin_test.py +++ b/test/python_tests/geojson_plugin_test.py @@ -1,110 
+1,123 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- +import os +import mapnik +import pytest -from nose.tools import eq_,assert_almost_equal -from utilities import execution_path, run_all -import os, mapnik -try: - import json -except ImportError: - import simplejson as json +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if 'geojson' in mapnik.DatasourceCache.plugin_names(): - def test_geojson_init(): - ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson') + def test_geojson_init(setup): + ds = mapnik.Datasource( + type='geojson', + file='../data/json/escaped.geojson') e = ds.envelope() - assert_almost_equal(e.minx, -81.705583, places=7) - assert_almost_equal(e.miny, 41.480573, places=6) - assert_almost_equal(e.maxx, -81.705583, places=5) - assert_almost_equal(e.maxy, 41.480573, places=3) + assert e.minx == pytest.approx(-81.705583, abs=1e-7) + assert e.miny == pytest.approx(41.480573, abs=1e-6) + assert e.maxx == pytest.approx(-81.705583, abs=1e-5) + assert e.maxy == pytest.approx(41.480573, abs=1e-3) def test_geojson_properties(): - ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson') - f = ds.features_at_point(ds.envelope().center()).features[0] - eq_(len(ds.fields()),7) + ds = mapnik.Datasource( + type='geojson', + file='../data/json/escaped.geojson') + f = list(ds.features_at_point(ds.envelope().center()))[0] + assert len(ds.fields()) == 11 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + + assert f['name'] == u'Test' + assert f['int'] == 1 + assert f['description'] == u'Test: \u005C' + assert f['spaces'] == u'this has spaces' + assert f['double'] == 1.1 + assert f['boolean'] == True + assert f['NOM_FR'] == u'Qu\xe9bec' + assert f['NOM_FR'] == u'Québec' + + ds = mapnik.Datasource( + type='geojson', + file='../data/json/escaped.geojson') + f = list(iter(ds))[0] + assert len(ds.fields()) == 11 - eq_(f['name'], u'Test') - eq_(f['int'], 1) - eq_(f['description'], u'Test: \u005C') - eq_(f['spaces'], u'this has spaces') - eq_(f['double'], 1.1) - eq_(f['boolean'], True) - eq_(f['NOM_FR'], u'Qu\xe9bec') - eq_(f['NOM_FR'], u'Québec') + desc = ds.describe() + assert desc['geometry_type'] == mapnik.DataGeometryType.Point - ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson') - f = ds.all_features()[0] - eq_(len(ds.fields()),7) + assert f['name'] == u'Test' + assert f['int'] == 1 + assert f['description'] == u'Test: \u005C' + assert f['spaces'] == u'this has spaces' + assert f['double'] == 1.1 + assert f['boolean'] == True + assert f['NOM_FR'] == u'Qu\xe9bec' + assert f['NOM_FR'] == u'Québec' - desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - - eq_(f['name'], u'Test') - eq_(f['int'], 1) - eq_(f['description'], u'Test: \u005C') - eq_(f['spaces'], u'this has spaces') - eq_(f['double'], 1.1) - eq_(f['boolean'], True) - eq_(f['NOM_FR'], u'Qu\xe9bec') - eq_(f['NOM_FR'], u'Québec') def test_large_geojson_properties(): - ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson',cache_features = False) - f = ds.features_at_point(ds.envelope().center()).features[0] - eq_(len(ds.fields()),7) + ds = mapnik.Datasource( + type='geojson', + file='../data/json/escaped.geojson', + cache_features=False) + f = 
list(ds.features_at_point(ds.envelope().center()))[0] + assert len(ds.fields()) == 11 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - - eq_(f['name'], u'Test') - eq_(f['int'], 1) - eq_(f['description'], u'Test: \u005C') - eq_(f['spaces'], u'this has spaces') - eq_(f['double'], 1.1) - eq_(f['boolean'], True) - eq_(f['NOM_FR'], u'Qu\xe9bec') - eq_(f['NOM_FR'], u'Québec') - - ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson') - f = ds.all_features()[0] - eq_(len(ds.fields()),7) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + + assert f['name'] == u'Test' + assert f['int'] == 1 + assert f['description'] == u'Test: \u005C' + assert f['spaces'] == u'this has spaces' + assert f['double'] == 1.1 + assert f['boolean'] == True + assert f['NOM_FR'] == u'Qu\xe9bec' + assert f['NOM_FR'] == u'Québec' + + ds = mapnik.Datasource( + type='geojson', + file='../data/json/escaped.geojson') + f = list(iter(ds))[0] + assert len(ds.fields()) == 11 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point - eq_(f['name'], u'Test') - eq_(f['int'], 1) - eq_(f['description'], u'Test: \u005C') - eq_(f['spaces'], u'this has spaces') - eq_(f['double'], 1.1) - eq_(f['boolean'], True) - eq_(f['NOM_FR'], u'Qu\xe9bec') - eq_(f['NOM_FR'], u'Québec') + assert f['name'] == u'Test' + assert f['int'] == 1 + assert f['description'] == u'Test: \u005C' + assert f['spaces'] == u'this has spaces' + assert f['double'] == 1.1 + assert f['boolean'] == True + assert f['NOM_FR'] == u'Qu\xe9bec' + assert f['NOM_FR'] == u'Québec' def test_geojson_from_in_memory_string(): # will silently fail since it is a geometry and needs to be a featurecollection. #ds = mapnik.Datasource(type='geojson',inline='{"type":"LineString","coordinates":[[0,0],[10,10]]}') # works since it is a featurecollection - ds = mapnik.Datasource(type='geojson',inline='{ "type":"FeatureCollection", "features": [ { "type":"Feature", "properties":{"name":"test"}, "geometry": { "type":"LineString","coordinates":[[0,0],[10,10]] } } ]}') - eq_(len(ds.fields()),1) - f = ds.all_features()[0] + ds = mapnik.Datasource( + type='geojson', + inline='{ "type":"FeatureCollection", "features": [ { "type":"Feature", "properties":{"name":"test"}, "geometry": { "type":"LineString","coordinates":[[0,0],[10,10]] } } ]}') + assert len(ds.fields()) == 1 + f = list(iter(ds))[0] desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.LineString) - eq_(f['name'], u'test') + assert desc['geometry_type'] == mapnik.DataGeometryType.LineString + assert f['name'] == u'test' # @raises(RuntimeError) def test_that_nonexistant_query_field_throws(**kwargs): - ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson') - eq_(len(ds.fields()),7) + ds = mapnik.Datasource( + type='geojson', + file='../data/json/escaped.geojson') + assert len(ds.fields()) == 11 # TODO - this sorting is messed up - #eq_(ds.fields(),['name', 'int', 'double', 'description', 'boolean', 'NOM_FR']) - #eq_(ds.field_types(),['str', 'int', 'float', 'str', 'bool', 'str']) + #assert ds.fields(),['name', 'int', 'double', 'description', 'boolean' == 'NOM_FR'] + #assert ds.field_types(),['str', 'int', 'float', 'str', 'bool' == 'str'] # TODO - should geojson plugin throw like others? 
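The hunks above and below all follow the same nose-to-pytest pattern: `eq_`/`assert_almost_equal` become bare `assert` statements (with `pytest.approx` for float comparisons), the `@raises` decorator becomes a `pytest.raises` context manager, and the module-level `setup()` becomes a module-scoped `yield` fixture. A minimal standalone sketch of those idioms is below; it uses illustrative values and a generic exception rather than the mapnik test data, so it is a hedged example of the pattern, not part of the actual test suite.

```python
import os
import pytest


@pytest.fixture(scope="module")
def setup():
    # Module-scoped replacement for nose's setup(): chdir once for the
    # whole module, then yield control back to the tests.
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    yield


def test_approximate_equality(setup):
    # pytest.approx(value, abs=1e-6) plays the role of
    # assert_almost_equal(..., places=6); the numbers here are made up.
    assert 41.4805734 == pytest.approx(41.480573, abs=1e-6)


def test_expected_exception():
    # pytest.raises replaces nose's @raises decorator.
    with pytest.raises(ZeroDivisionError):
        1 / 0
```

Passing `abs=` sets an absolute tolerance, which maps more directly onto nose's `places=` argument than `pytest.approx`'s default relative tolerance would.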
# query = mapnik.Query(ds.envelope()) # for fld in ds.fields(): @@ -114,13 +127,11 @@ def test_that_nonexistant_query_field_throws(**kwargs): # fs = ds.features(query) def test_parsing_feature_collection_with_top_level_properties(): - ds = mapnik.Datasource(type='geojson',file='../data/json/feature_collection_level_properties.json') - f = ds.all_features()[0] + ds = mapnik.Datasource( + type='geojson', + file='../data/json/feature_collection_level_properties.json') + f = list(iter(ds))[0] desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - eq_(f['feat_name'], u'feat_value') - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point + assert f['feat_name'] == u'feat_value' diff --git a/test/python_tests/geometry_io_test.py b/test/python_tests/geometry_io_test.py index 58e4f3659..f24b90198 100644 --- a/test/python_tests/geometry_io_test.py +++ b/test/python_tests/geometry_io_test.py @@ -1,52 +1,75 @@ -#encoding: utf8 - -from nose.tools import eq_,raises -import os -from utilities import execution_path, run_all -import mapnik from binascii import unhexlify - +import mapnik +import pytest try: import json except ImportError: import simplejson as json -def setup(): - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) wkts = [ - [mapnik.GeometryType.Point,"POINT(30 10)", "01010000000000000000003e400000000000002440"], - [mapnik.GeometryType.Point,"POINT(30.0 10.0)", "01010000000000000000003e400000000000002440"], - [mapnik.GeometryType.Point,"POINT(30.1 10.1)", "01010000009a99999999193e403333333333332440"], - [mapnik.GeometryType.LineString,"LINESTRING(30 10,10 30,40 40)", "0102000000030000000000000000003e40000000000000244000000000000024400000000000003e4000000000000044400000000000004440"], - [mapnik.GeometryType.Polygon,"POLYGON((30 10,10 20,20 40,40 40,30 10))", "010300000001000000050000000000000000003e4000000000000024400000000000002440000000000000344000000000000034400000000000004440000000000000444000000000000044400000000000003e400000000000002440"], - [mapnik.GeometryType.Polygon,"POLYGON((35 10,10 20,15 40,45 45,35 10),(20 30,35 35,30 20,20 30))","0103000000020000000500000000000000008041400000000000002440000000000000244000000000000034400000000000002e40000000000000444000000000008046400000000000804640000000000080414000000000000024400400000000000000000034400000000000003e40000000000080414000000000008041400000000000003e40000000000000344000000000000034400000000000003e40"], - [mapnik.GeometryType.MultiPoint,"MULTIPOINT((10 40),(40 30),(20 20),(30 10))","010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003e4001010000000000000000003440000000000000344001010000000000000000003e400000000000002440"], - [mapnik.GeometryType.MultiLineString,"MULTILINESTRING((10 10,20 20,10 40),(40 40,30 30,40 20,30 10))","010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003e400000000000003e40000000000000444000000000000034400000000000003e400000000000002440"], - [mapnik.GeometryType.MultiPolygon,"MULTIPOLYGON(((30 20,10 40,45 40,30 20)),((15 5,40 10,10 20,5 10,15 
5)))","010600000002000000010300000001000000040000000000000000003e40000000000000344000000000000024400000000000004440000000000080464000000000000044400000000000003e400000000000003440010300000001000000050000000000000000002e4000000000000014400000000000004440000000000000244000000000000024400000000000003440000000000000144000000000000024400000000000002e400000000000001440"], - [mapnik.GeometryType.MultiPolygon,"MULTIPOLYGON(((40 40,20 45,45 30,40 40)),((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20)))","01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003e40000000000000444000000000000044400103000000020000000600000000000000000034400000000000804140000000000080464000000000000034400000000000003e4000000000000014400000000000002440000000000000244000000000000024400000000000003e4000000000000034400000000000804140040000000000000000003e4000000000000034400000000000003440000000000000394000000000000034400000000000002e400000000000003e400000000000003440"], - [mapnik.GeometryType.GeometryCollection,"GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))","01070000000300000001030000000100000005000000000000000000f03f000000000000f03f0000000000000040000000000000f03f00000000000000400000000000000040000000000000f03f0000000000000040000000000000f03f000000000000f03f0101000000000000000000004000000000000008400102000000020000000000000000000040000000000000084000000000000008400000000000001040"], - [mapnik.GeometryType.Polygon,"POLYGON((-178.32319 71.518365,-178.321586 71.518439,-178.259635 71.510688,-178.304862 71.513129,-178.32319 71.518365),(-178.32319 71.518365,-178.341544 71.517524,-178.32244 71.505439,-178.215323 71.478034,-178.193473 71.47663,-178.147757 71.485175,-178.124442 71.481879,-178.005729 71.448615,-178.017203 71.441413,-178.054191 71.428778,-178.047049 71.425727,-178.033439 71.417792,-178.026236 71.415107,-178.030082 71.413459,-178.039908 71.40766,-177.970878 71.39643,-177.779837 71.333197,-177.718375 71.305243,-177.706412 71.3039,-177.68212 71.304877,-177.670279 71.301825,-177.655387 71.293158,-177.587577 71.285956,-177.548575 71.294867,-177.531119 71.296332,-177.51409 71.293402,-177.498649 71.284735,-177.506217 71.268622,-177.486991 71.258734,-177.459708 71.249884,-177.443412 71.237006,-177.445914 71.222663,-177.457755 71.209357,-177.507804 71.173774,-177.581168 71.147589,-177.637626 71.117011,-177.684134 71.110968,-177.751883 71.092963,-177.819266 71.084662,-177.877677 71.052558,-177.930472 71.041449,-178.206595 71.038398,-178.310111 71.013617,-178.875907 70.981024,-178.980277 70.95069,-179.342093 70.908026,-179.336234 70.911078,-179.322257 70.921698,-179.364493 70.930243,-179.457511 70.915534,-179.501212 70.919684,-179.666007 70.965461,-179.853385 70.979438,-179.888785 70.993598,-179.907523 70.996772,-179.999989 70.992011,-179.999989 71.024848,-179.999989 71.058661,-179.999989 71.126166,-179.999989 71.187018,-179.999989 71.224189,-179.999989 71.27497,-179.999989 71.312079,-179.999989 71.356024,-179.999989 71.410041,-179.999989 71.487799,-179.999989 71.536689,-179.862845 71.538642,-179.912223 71.555854,-179.900748 71.558478,-179.798819 71.569098,-179.757438 71.583197,-179.735953 71.586432,-179.715445 71.583258,-179.697501 71.577338,-179.678702 71.573676,-179.610831 71.585211,-179.372062 71.569098,-179.326774 71.555487,-179.306815 71.557563,-179.287162 71.562934,-179.24285 71.569098,-179.204642 71.583197,-179.074576 71.600043,-178.395438 71.539008,-178.32319 
71.518365))","010300000002000000050000009e0c8e92574a66c0079964e42ce151403f1bb96e4a4a66c0247ec51a2ee15140b43c0fee4e4866c06b9db81cafe0514062f9f36dc14966c04568041bd7e051409e0c8e92574a66c0079964e42ce15140560000009e0c8e92574a66c0079964e42ce15140a4c4aeeded4a66c049b9fb1c1fe1514083ddb06d514a66c0dec7d11c59e0514074620fede34666c01118eb1b98de514017f549ee304666c0b8921d1b81de51402a58e36cba4466c08e75711b0ddf5140f607ca6dfb4366c04568041bd7de5140717495ee2e4066c0af5fb01bb6dc5140944c4eed8c4066c0dfc14f1c40dc51409a97c3eebb4166c0a2cf471971db5140e789e76c814166c08c81751c3fdb5140266daaee114166c0f321a81abdda51404b3fe1ecd64066c0ce55f31c91da514058c6866ef64066c029cfbc1c76da5140295b24ed464166c0cbf8f71917da5140fd4cbd6e113f66c0cf49ef1b5fd95140be66b96cf43866c00588821953d55140736891edfc3666c04f95ef1989d3514010b056ed9a3666c04850fc1873d35140527e52edd33566c05e13d21a83d351404206f2ec723566c05f07ce1951d35140276728eef83466c0e4a3c519c3d2514038dc476ecd3266c01406651a4dd25140e78c28ed8d3166c0db6ad619dfd2514018e945edfe3066c088307e1af7d251406c26df6c733066c02fa52e19c7d25140dc65bfeef42f66c0b341261939d25140bc75feed323066c01c3f541a31d15140484e266e952f66c040170d198fd05140b5368dedb52e66c0cbf27519fecf5140edd45c6e302e66c02bbd361b2bcf514083da6fed442e66c0dfc14f1c40ce51409352d0eda52e66c0130ce71a66cd5140e5982cee3f3066c049b9fb1c1fcb51406a2fa2ed983266c0f50f221972c951408a22a46e673466c06d91b41b7dc751409d82fc6ce43566c0c3b986191ac7514063edef6c0f3866c03c2f151bf3c551403197546d373a66c0b14d2a1a6bc55140726e13ee153c66c02ac93a1c5dc35140003b376dc63d66c0ecdcb419a7c251408c101e6d9c4666c0d68ee21c75c251402864e76dec4966c0db6ad619dfc05140cdea1d6e075c66c0d525e318c9be5140ac00df6d5e5f66c097a8de1ad8bc514019e6046df26a66c0bc7a15191dba5140b439ce6dc26a66c0ba86191a4fba5140077de9ed4f6a66c079b29b19fdba5140df5339eda96b66c04f95ef1989bb5140eed11beea36e66c01118eb1b98ba51409c8bbfed097066c0e3a9471adcba5140077de9ed4f7566c01024ef1ccabd5140b43c0fee4e7b66c06b9db81cafbe51409d853ded707c66c0bfd7101c97bf5140c5abac6d0a7d66c06364c91ccbbf5140836beee8ff7f66c06d91b41b7dbf5140836beee8ff7f66c0bfd7101c97c15140836beee8ff7f66c03e23111ac1c35140836beee8ff7f66c07ff78e1a13c85140836beee8ff7f66c0da70581af8cb5140836beee8ff7f66c0dec7d11c59ce5140836beee8ff7f66c06458c51b99d15140836beee8ff7f66c02db1321af9d35140836beee8ff7f66c0d525e318c9d65140836beee8ff7f66c03a419b1c3eda5140836beee8ff7f66c060014c1938df5140836beee8ff7f66c0dec7d11c59e251408c101e6d9c7b66c021904b1c79e2514017f549ee307d66c073d6a71c93e35140ff3d78edd27c66c04562821abee351408d0dddec8f7966c0048e041a6ce45140edd79dee3c7866c00588821953e55140944c4eed8c7766c0fc54151a88e55140c6a2e9ece47666c058c85c1954e55140acfd9ded517666c03c2f151bf3e451405ab741edb77566c002a08a1bb7e45140410c74ed8b7366c09b90d61874e55140b34291eee76b66c0048e041a6ce45140dc65bfee746a66c09a9658198de3514083ddb06dd16966c06b9db81cafe35140edd45c6e306966c09df3531c07e45140adfa5c6dc56766c0048e041a6ce451406b2c616d8c6666c00588821953e55140212235ed626266c0664cc11a67e6514010b3976da74c66c01212691b7fe251409e0c8e92574a66c0079964e42ce15140"], - [mapnik.GeometryType.MultiPolygon,"MULTIPOLYGON(((-178.32319 71.518365,-178.321586 71.518439,-178.259635 71.510688,-178.304862 71.513129,-178.32319 71.518365)),((-178.32319 71.518365,-178.341544 71.517524,-178.32244 71.505439,-178.215323 71.478034,-178.193473 71.47663,-178.147757 71.485175,-178.124442 71.481879,-178.005729 71.448615,-178.017203 71.441413,-178.054191 71.428778,-178.047049 71.425727,-178.033439 71.417792,-178.026236 71.415107,-178.030082 71.413459,-178.039908 71.40766,-177.970878 71.39643,-177.779837 71.333197,-177.718375 71.305243,-177.706412 71.3039,-177.68212 
71.304877,-177.670279 71.301825,-177.655387 71.293158,-177.587577 71.285956,-177.548575 71.294867,-177.531119 71.296332,-177.51409 71.293402,-177.498649 71.284735,-177.506217 71.268622,-177.486991 71.258734,-177.459708 71.249884,-177.443412 71.237006,-177.445914 71.222663,-177.457755 71.209357,-177.507804 71.173774,-177.581168 71.147589,-177.637626 71.117011,-177.684134 71.110968,-177.751883 71.092963,-177.819266 71.084662,-177.877677 71.052558,-177.930472 71.041449,-178.206595 71.038398,-178.310111 71.013617,-178.875907 70.981024,-178.980277 70.95069,-179.342093 70.908026,-179.336234 70.911078,-179.322257 70.921698,-179.364493 70.930243,-179.457511 70.915534,-179.501212 70.919684,-179.666007 70.965461,-179.853385 70.979438,-179.888785 70.993598,-179.907523 70.996772,-179.999989 70.992011,-179.999989 71.024848,-179.999989 71.058661,-179.999989 71.126166,-179.999989 71.187018,-179.999989 71.224189,-179.999989 71.27497,-179.999989 71.312079,-179.999989 71.356024,-179.999989 71.410041,-179.999989 71.487799,-179.999989 71.536689,-179.862845 71.538642,-179.912223 71.555854,-179.900748 71.558478,-179.798819 71.569098,-179.757438 71.583197,-179.735953 71.586432,-179.715445 71.583258,-179.697501 71.577338,-179.678702 71.573676,-179.610831 71.585211,-179.372062 71.569098,-179.326774 71.555487,-179.306815 71.557563,-179.287162 71.562934,-179.24285 71.569098,-179.204642 71.583197,-179.074576 71.600043,-178.395438 71.539008,-178.32319 71.518365)))","010600000002000000010300000001000000050000009e0c8e92574a66c0079964e42ce151403f1bb96e4a4a66c0247ec51a2ee15140b43c0fee4e4866c06b9db81cafe0514062f9f36dc14966c04568041bd7e051409e0c8e92574a66c0079964e42ce15140010300000001000000560000009e0c8e92574a66c0079964e42ce15140a4c4aeeded4a66c049b9fb1c1fe1514083ddb06d514a66c0dec7d11c59e0514074620fede34666c01118eb1b98de514017f549ee304666c0b8921d1b81de51402a58e36cba4466c08e75711b0ddf5140f607ca6dfb4366c04568041bd7de5140717495ee2e4066c0af5fb01bb6dc5140944c4eed8c4066c0dfc14f1c40dc51409a97c3eebb4166c0a2cf471971db5140e789e76c814166c08c81751c3fdb5140266daaee114166c0f321a81abdda51404b3fe1ecd64066c0ce55f31c91da514058c6866ef64066c029cfbc1c76da5140295b24ed464166c0cbf8f71917da5140fd4cbd6e113f66c0cf49ef1b5fd95140be66b96cf43866c00588821953d55140736891edfc3666c04f95ef1989d3514010b056ed9a3666c04850fc1873d35140527e52edd33566c05e13d21a83d351404206f2ec723566c05f07ce1951d35140276728eef83466c0e4a3c519c3d2514038dc476ecd3266c01406651a4dd25140e78c28ed8d3166c0db6ad619dfd2514018e945edfe3066c088307e1af7d251406c26df6c733066c02fa52e19c7d25140dc65bfeef42f66c0b341261939d25140bc75feed323066c01c3f541a31d15140484e266e952f66c040170d198fd05140b5368dedb52e66c0cbf27519fecf5140edd45c6e302e66c02bbd361b2bcf514083da6fed442e66c0dfc14f1c40ce51409352d0eda52e66c0130ce71a66cd5140e5982cee3f3066c049b9fb1c1fcb51406a2fa2ed983266c0f50f221972c951408a22a46e673466c06d91b41b7dc751409d82fc6ce43566c0c3b986191ac7514063edef6c0f3866c03c2f151bf3c551403197546d373a66c0b14d2a1a6bc55140726e13ee153c66c02ac93a1c5dc35140003b376dc63d66c0ecdcb419a7c251408c101e6d9c4666c0d68ee21c75c251402864e76dec4966c0db6ad619dfc05140cdea1d6e075c66c0d525e318c9be5140ac00df6d5e5f66c097a8de1ad8bc514019e6046df26a66c0bc7a15191dba5140b439ce6dc26a66c0ba86191a4fba5140077de9ed4f6a66c079b29b19fdba5140df5339eda96b66c04f95ef1989bb5140eed11beea36e66c01118eb1b98ba51409c8bbfed097066c0e3a9471adcba5140077de9ed4f7566c01024ef1ccabd5140b43c0fee4e7b66c06b9db81cafbe51409d853ded707c66c0bfd7101c97bf5140c5abac6d0a7d66c06364c91ccbbf5140836beee8ff7f66c06d91b41b7dbf5140836beee8ff7f66c0bfd7101c97c15140836beee8ff7f66c03e23111ac1c35140836beee
8ff7f66c07ff78e1a13c85140836beee8ff7f66c0da70581af8cb5140836beee8ff7f66c0dec7d11c59ce5140836beee8ff7f66c06458c51b99d15140836beee8ff7f66c02db1321af9d35140836beee8ff7f66c0d525e318c9d65140836beee8ff7f66c03a419b1c3eda5140836beee8ff7f66c060014c1938df5140836beee8ff7f66c0dec7d11c59e251408c101e6d9c7b66c021904b1c79e2514017f549ee307d66c073d6a71c93e35140ff3d78edd27c66c04562821abee351408d0dddec8f7966c0048e041a6ce45140edd79dee3c7866c00588821953e55140944c4eed8c7766c0fc54151a88e55140c6a2e9ece47666c058c85c1954e55140acfd9ded517666c03c2f151bf3e451405ab741edb77566c002a08a1bb7e45140410c74ed8b7366c09b90d61874e55140b34291eee76b66c0048e041a6ce45140dc65bfee746a66c09a9658198de3514083ddb06dd16966c06b9db81cafe35140edd45c6e306966c09df3531c07e45140adfa5c6dc56766c0048e041a6ce451406b2c616d8c6666c00588821953e55140212235ed626266c0664cc11a67e6514010b3976da74c66c01212691b7fe251409e0c8e92574a66c0079964e42ce15140"] + [mapnik.GeometryType.Point, + "POINT(30 10)", + "01010000000000000000003e400000000000002440"], + [mapnik.GeometryType.Point, + "POINT(30.0 10.0)", + "01010000000000000000003e400000000000002440"], + [mapnik.GeometryType.Point, + "POINT(30.1 10.1)", + "01010000009a99999999193e403333333333332440"], + [mapnik.GeometryType.LineString, + "LINESTRING(30 10,10 30,40 40)", + "0102000000030000000000000000003e40000000000000244000000000000024400000000000003e4000000000000044400000000000004440"], + [mapnik.GeometryType.Polygon, + "POLYGON((30 10,10 20,20 40,40 40,30 10))", + "010300000001000000050000000000000000003e4000000000000024400000000000002440000000000000344000000000000034400000000000004440000000000000444000000000000044400000000000003e400000000000002440"], + [mapnik.GeometryType.Polygon, + "POLYGON((35 10,10 20,15 40,45 45,35 10),(20 30,35 35,30 20,20 30))", + "0103000000020000000500000000000000008041400000000000002440000000000000244000000000000034400000000000002e40000000000000444000000000008046400000000000804640000000000080414000000000000024400400000000000000000034400000000000003e40000000000080414000000000008041400000000000003e40000000000000344000000000000034400000000000003e40"], + [mapnik.GeometryType.MultiPoint, + "MULTIPOINT((10 40),(40 30),(20 20),(30 10))", + "010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003e4001010000000000000000003440000000000000344001010000000000000000003e400000000000002440"], + [mapnik.GeometryType.MultiLineString, + "MULTILINESTRING((10 10,20 20,10 40),(40 40,30 30,40 20,30 10))", + "010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003e400000000000003e40000000000000444000000000000034400000000000003e400000000000002440"], + [mapnik.GeometryType.MultiPolygon, + "MULTIPOLYGON(((30 20,10 40,45 40,30 20)),((15 5,40 10,10 20,5 10,15 5)))", + "010600000002000000010300000001000000040000000000000000003e40000000000000344000000000000024400000000000004440000000000080464000000000000044400000000000003e400000000000003440010300000001000000050000000000000000002e4000000000000014400000000000004440000000000000244000000000000024400000000000003440000000000000144000000000000024400000000000002e400000000000001440"], + [mapnik.GeometryType.MultiPolygon, + "MULTIPOLYGON(((40 40,20 45,45 30,40 40)),((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20)))", + 
"01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003e40000000000000444000000000000044400103000000020000000600000000000000000034400000000000804140000000000080464000000000000034400000000000003e4000000000000014400000000000002440000000000000244000000000000024400000000000003e4000000000000034400000000000804140040000000000000000003e4000000000000034400000000000003440000000000000394000000000000034400000000000002e400000000000003e400000000000003440"], + [mapnik.GeometryType.GeometryCollection, + "GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))", + "01070000000300000001030000000100000005000000000000000000f03f000000000000f03f0000000000000040000000000000f03f00000000000000400000000000000040000000000000f03f0000000000000040000000000000f03f000000000000f03f0101000000000000000000004000000000000008400102000000020000000000000000000040000000000000084000000000000008400000000000001040"], + [mapnik.GeometryType.Polygon, "POLYGON((-178.32319 71.518365,-178.321586 71.518439,-178.259635 71.510688,-178.304862 71.513129,-178.32319 71.518365),(-178.32319 71.518365,-178.341544 71.517524,-178.32244 71.505439,-178.215323 71.478034,-178.193473 71.47663,-178.147757 71.485175,-178.124442 71.481879,-178.005729 71.448615,-178.017203 71.441413,-178.054191 71.428778,-178.047049 71.425727,-178.033439 71.417792,-178.026236 71.415107,-178.030082 71.413459,-178.039908 71.40766,-177.970878 71.39643,-177.779837 71.333197,-177.718375 71.305243,-177.706412 71.3039,-177.68212 71.304877,-177.670279 71.301825,-177.655387 71.293158,-177.587577 71.285956,-177.548575 71.294867,-177.531119 71.296332,-177.51409 71.293402,-177.498649 71.284735,-177.506217 71.268622,-177.486991 71.258734,-177.459708 71.249884,-177.443412 71.237006,-177.445914 71.222663,-177.457755 71.209357,-177.507804 71.173774,-177.581168 71.147589,-177.637626 71.117011,-177.684134 71.110968,-177.751883 71.092963,-177.819266 71.084662,-177.877677 71.052558,-177.930472 71.041449,-178.206595 71.038398,-178.310111 71.013617,-178.875907 70.981024,-178.980277 70.95069,-179.342093 70.908026,-179.336234 70.911078,-179.322257 70.921698,-179.364493 70.930243,-179.457511 70.915534,-179.501212 70.919684,-179.666007 70.965461,-179.853385 70.979438,-179.888785 70.993598,-179.907523 70.996772,-179.999989 70.992011,-179.999989 71.024848,-179.999989 71.058661,-179.999989 71.126166,-179.999989 71.187018,-179.999989 71.224189,-179.999989 71.27497,-179.999989 71.312079,-179.999989 71.356024,-179.999989 71.410041,-179.999989 71.487799,-179.999989 71.536689,-179.862845 71.538642,-179.912223 71.555854,-179.900748 71.558478,-179.798819 71.569098,-179.757438 71.583197,-179.735953 71.586432,-179.715445 71.583258,-179.697501 71.577338,-179.678702 71.573676,-179.610831 71.585211,-179.372062 71.569098,-179.326774 71.555487,-179.306815 71.557563,-179.287162 71.562934,-179.24285 71.569098,-179.204642 71.583197,-179.074576 71.600043,-178.395438 71.539008,-178.32319 71.518365))", + 
"010300000002000000050000009e0c8e92574a66c0079964e42ce151403f1bb96e4a4a66c0247ec51a2ee15140b43c0fee4e4866c06b9db81cafe0514062f9f36dc14966c04568041bd7e051409e0c8e92574a66c0079964e42ce15140560000009e0c8e92574a66c0079964e42ce15140a4c4aeeded4a66c049b9fb1c1fe1514083ddb06d514a66c0dec7d11c59e0514074620fede34666c01118eb1b98de514017f549ee304666c0b8921d1b81de51402a58e36cba4466c08e75711b0ddf5140f607ca6dfb4366c04568041bd7de5140717495ee2e4066c0af5fb01bb6dc5140944c4eed8c4066c0dfc14f1c40dc51409a97c3eebb4166c0a2cf471971db5140e789e76c814166c08c81751c3fdb5140266daaee114166c0f321a81abdda51404b3fe1ecd64066c0ce55f31c91da514058c6866ef64066c029cfbc1c76da5140295b24ed464166c0cbf8f71917da5140fd4cbd6e113f66c0cf49ef1b5fd95140be66b96cf43866c00588821953d55140736891edfc3666c04f95ef1989d3514010b056ed9a3666c04850fc1873d35140527e52edd33566c05e13d21a83d351404206f2ec723566c05f07ce1951d35140276728eef83466c0e4a3c519c3d2514038dc476ecd3266c01406651a4dd25140e78c28ed8d3166c0db6ad619dfd2514018e945edfe3066c088307e1af7d251406c26df6c733066c02fa52e19c7d25140dc65bfeef42f66c0b341261939d25140bc75feed323066c01c3f541a31d15140484e266e952f66c040170d198fd05140b5368dedb52e66c0cbf27519fecf5140edd45c6e302e66c02bbd361b2bcf514083da6fed442e66c0dfc14f1c40ce51409352d0eda52e66c0130ce71a66cd5140e5982cee3f3066c049b9fb1c1fcb51406a2fa2ed983266c0f50f221972c951408a22a46e673466c06d91b41b7dc751409d82fc6ce43566c0c3b986191ac7514063edef6c0f3866c03c2f151bf3c551403197546d373a66c0b14d2a1a6bc55140726e13ee153c66c02ac93a1c5dc35140003b376dc63d66c0ecdcb419a7c251408c101e6d9c4666c0d68ee21c75c251402864e76dec4966c0db6ad619dfc05140cdea1d6e075c66c0d525e318c9be5140ac00df6d5e5f66c097a8de1ad8bc514019e6046df26a66c0bc7a15191dba5140b439ce6dc26a66c0ba86191a4fba5140077de9ed4f6a66c079b29b19fdba5140df5339eda96b66c04f95ef1989bb5140eed11beea36e66c01118eb1b98ba51409c8bbfed097066c0e3a9471adcba5140077de9ed4f7566c01024ef1ccabd5140b43c0fee4e7b66c06b9db81cafbe51409d853ded707c66c0bfd7101c97bf5140c5abac6d0a7d66c06364c91ccbbf5140836beee8ff7f66c06d91b41b7dbf5140836beee8ff7f66c0bfd7101c97c15140836beee8ff7f66c03e23111ac1c35140836beee8ff7f66c07ff78e1a13c85140836beee8ff7f66c0da70581af8cb5140836beee8ff7f66c0dec7d11c59ce5140836beee8ff7f66c06458c51b99d15140836beee8ff7f66c02db1321af9d35140836beee8ff7f66c0d525e318c9d65140836beee8ff7f66c03a419b1c3eda5140836beee8ff7f66c060014c1938df5140836beee8ff7f66c0dec7d11c59e251408c101e6d9c7b66c021904b1c79e2514017f549ee307d66c073d6a71c93e35140ff3d78edd27c66c04562821abee351408d0dddec8f7966c0048e041a6ce45140edd79dee3c7866c00588821953e55140944c4eed8c7766c0fc54151a88e55140c6a2e9ece47666c058c85c1954e55140acfd9ded517666c03c2f151bf3e451405ab741edb77566c002a08a1bb7e45140410c74ed8b7366c09b90d61874e55140b34291eee76b66c0048e041a6ce45140dc65bfee746a66c09a9658198de3514083ddb06dd16966c06b9db81cafe35140edd45c6e306966c09df3531c07e45140adfa5c6dc56766c0048e041a6ce451406b2c616d8c6666c00588821953e55140212235ed626266c0664cc11a67e6514010b3976da74c66c01212691b7fe251409e0c8e92574a66c0079964e42ce15140"], + [mapnik.GeometryType.MultiPolygon, "MULTIPOLYGON(((-178.32319 71.518365,-178.321586 71.518439,-178.259635 71.510688,-178.304862 71.513129,-178.32319 71.518365)),((-178.32319 71.518365,-178.341544 71.517524,-178.32244 71.505439,-178.215323 71.478034,-178.193473 71.47663,-178.147757 71.485175,-178.124442 71.481879,-178.005729 71.448615,-178.017203 71.441413,-178.054191 71.428778,-178.047049 71.425727,-178.033439 71.417792,-178.026236 71.415107,-178.030082 71.413459,-178.039908 71.40766,-177.970878 71.39643,-177.779837 71.333197,-177.718375 71.305243,-177.706412 71.3039,-177.68212 
71.304877,-177.670279 71.301825,-177.655387 71.293158,-177.587577 71.285956,-177.548575 71.294867,-177.531119 71.296332,-177.51409 71.293402,-177.498649 71.284735,-177.506217 71.268622,-177.486991 71.258734,-177.459708 71.249884,-177.443412 71.237006,-177.445914 71.222663,-177.457755 71.209357,-177.507804 71.173774,-177.581168 71.147589,-177.637626 71.117011,-177.684134 71.110968,-177.751883 71.092963,-177.819266 71.084662,-177.877677 71.052558,-177.930472 71.041449,-178.206595 71.038398,-178.310111 71.013617,-178.875907 70.981024,-178.980277 70.95069,-179.342093 70.908026,-179.336234 70.911078,-179.322257 70.921698,-179.364493 70.930243,-179.457511 70.915534,-179.501212 70.919684,-179.666007 70.965461,-179.853385 70.979438,-179.888785 70.993598,-179.907523 70.996772,-179.999989 70.992011,-179.999989 71.024848,-179.999989 71.058661,-179.999989 71.126166,-179.999989 71.187018,-179.999989 71.224189,-179.999989 71.27497,-179.999989 71.312079,-179.999989 71.356024,-179.999989 71.410041,-179.999989 71.487799,-179.999989 71.536689,-179.862845 71.538642,-179.912223 71.555854,-179.900748 71.558478,-179.798819 71.569098,-179.757438 71.583197,-179.735953 71.586432,-179.715445 71.583258,-179.697501 71.577338,-179.678702 71.573676,-179.610831 71.585211,-179.372062 71.569098,-179.326774 71.555487,-179.306815 71.557563,-179.287162 71.562934,-179.24285 71.569098,-179.204642 71.583197,-179.074576 71.600043,-178.395438 71.539008,-178.32319 71.518365)))", + "010600000002000000010300000001000000050000009e0c8e92574a66c0079964e42ce151403f1bb96e4a4a66c0247ec51a2ee15140b43c0fee4e4866c06b9db81cafe0514062f9f36dc14966c04568041bd7e051409e0c8e92574a66c0079964e42ce15140010300000001000000560000009e0c8e92574a66c0079964e42ce15140a4c4aeeded4a66c049b9fb1c1fe1514083ddb06d514a66c0dec7d11c59e0514074620fede34666c01118eb1b98de514017f549ee304666c0b8921d1b81de51402a58e36cba4466c08e75711b0ddf5140f607ca6dfb4366c04568041bd7de5140717495ee2e4066c0af5fb01bb6dc5140944c4eed8c4066c0dfc14f1c40dc51409a97c3eebb4166c0a2cf471971db5140e789e76c814166c08c81751c3fdb5140266daaee114166c0f321a81abdda51404b3fe1ecd64066c0ce55f31c91da514058c6866ef64066c029cfbc1c76da5140295b24ed464166c0cbf8f71917da5140fd4cbd6e113f66c0cf49ef1b5fd95140be66b96cf43866c00588821953d55140736891edfc3666c04f95ef1989d3514010b056ed9a3666c04850fc1873d35140527e52edd33566c05e13d21a83d351404206f2ec723566c05f07ce1951d35140276728eef83466c0e4a3c519c3d2514038dc476ecd3266c01406651a4dd25140e78c28ed8d3166c0db6ad619dfd2514018e945edfe3066c088307e1af7d251406c26df6c733066c02fa52e19c7d25140dc65bfeef42f66c0b341261939d25140bc75feed323066c01c3f541a31d15140484e266e952f66c040170d198fd05140b5368dedb52e66c0cbf27519fecf5140edd45c6e302e66c02bbd361b2bcf514083da6fed442e66c0dfc14f1c40ce51409352d0eda52e66c0130ce71a66cd5140e5982cee3f3066c049b9fb1c1fcb51406a2fa2ed983266c0f50f221972c951408a22a46e673466c06d91b41b7dc751409d82fc6ce43566c0c3b986191ac7514063edef6c0f3866c03c2f151bf3c551403197546d373a66c0b14d2a1a6bc55140726e13ee153c66c02ac93a1c5dc35140003b376dc63d66c0ecdcb419a7c251408c101e6d9c4666c0d68ee21c75c251402864e76dec4966c0db6ad619dfc05140cdea1d6e075c66c0d525e318c9be5140ac00df6d5e5f66c097a8de1ad8bc514019e6046df26a66c0bc7a15191dba5140b439ce6dc26a66c0ba86191a4fba5140077de9ed4f6a66c079b29b19fdba5140df5339eda96b66c04f95ef1989bb5140eed11beea36e66c01118eb1b98ba51409c8bbfed097066c0e3a9471adcba5140077de9ed4f7566c01024ef1ccabd5140b43c0fee4e7b66c06b9db81cafbe51409d853ded707c66c0bfd7101c97bf5140c5abac6d0a7d66c06364c91ccbbf5140836beee8ff7f66c06d91b41b7dbf5140836beee8ff7f66c0bfd7101c97c15140836beee8ff7f66c03e23111ac1c35140836b
eee8ff7f66c07ff78e1a13c85140836beee8ff7f66c0da70581af8cb5140836beee8ff7f66c0dec7d11c59ce5140836beee8ff7f66c06458c51b99d15140836beee8ff7f66c02db1321af9d35140836beee8ff7f66c0d525e318c9d65140836beee8ff7f66c03a419b1c3eda5140836beee8ff7f66c060014c1938df5140836beee8ff7f66c0dec7d11c59e251408c101e6d9c7b66c021904b1c79e2514017f549ee307d66c073d6a71c93e35140ff3d78edd27c66c04562821abee351408d0dddec8f7966c0048e041a6ce45140edd79dee3c7866c00588821953e55140944c4eed8c7766c0fc54151a88e55140c6a2e9ece47666c058c85c1954e55140acfd9ded517666c03c2f151bf3e451405ab741edb77566c002a08a1bb7e45140410c74ed8b7366c09b90d61874e55140b34291eee76b66c0048e041a6ce45140dc65bfee746a66c09a9658198de3514083ddb06dd16966c06b9db81cafe35140edd45c6e306966c09df3531c07e45140adfa5c6dc56766c0048e041a6ce451406b2c616d8c6666c00588821953e55140212235ed626266c0664cc11a67e6514010b3976da74c66c01212691b7fe251409e0c8e92574a66c0079964e42ce15140"] ] geojson = [ - [mapnik.GeometryType.Point,'{"type":"Point","coordinates":[30,10]}'], - [mapnik.GeometryType.Point,'{"type":"Point","coordinates":[30.0,10.0]}'], - [mapnik.GeometryType.Point,'{"type":"Point","coordinates":[30.1,10.1]}'], - [mapnik.GeometryType.LineString,'{"type":"LineString","coordinates":[[30.0,10.0],[10.0,30.0],[40.0,40.0]]}'], - [mapnik.GeometryType.Polygon,'{"type":"Polygon","coordinates":[[[30.0,10.0],[10.0,20.0],[20.0,40.0],[40.0,40.0],[30.0,10.0]]]}'], - [mapnik.GeometryType.Polygon,'{"type":"Polygon","coordinates":[[[35.0,10.0],[10.0,20.0],[15.0,40.0],[45.0,45.0],[35.0,10.0]],[[20.0,30.0],[35.0,35.0],[30.0,20.0],[20.0,30.0]]]}'], - [mapnik.GeometryType.MultiPoint,'{"type":"MultiPoint","coordinates":[[10.0,40.0],[40.0,30.0],[20.0,20.0],[30.0,10.0]]}'], - [mapnik.GeometryType.MultiLineString,'{"type":"MultiLineString","coordinates":[[[10.0,10.0],[20.0,20.0],[10.0,40.0]],[[40.0,40.0],[30.0,30.0],[40.0,20.0],[30.0,10.0]]]}'], - [mapnik.GeometryType.MultiPolygon,'{"type":"MultiPolygon","coordinates":[[[[30.0,20.0],[10.0,40.0],[45.0,40.0],[30.0,20.0]]],[[[15.0,5.0],[40.0,10.0],[10.0,20.0],[5.0,10.0],[15.0,5.0]]]]}'], - [mapnik.GeometryType.MultiPolygon,'{"type":"MultiPolygon","coordinates":[[[[40.0,40.0],[20.0,45.0],[45.0,30.0],[40.0,40.0]]],[[[20.0,35.0],[45.0,20.0],[30.0,5.0],[10.0,10.0],[10.0,30.0],[20.0,35.0]],[[30.0,20.0],[20.0,25.0],[20.0,15.0],[30.0,20.0]]]]}'], - [mapnik.GeometryType.GeometryCollection,'{"type":"GeometryCollection","geometries":[{"type":"Polygon","coordinates":[[[1.0,1.0],[2.0,1.0],[2.0,2.0],[1.0,2.0],[1.0,1.0]]]},{"type":"Point","coordinates":[2.0,3.0]},{"type":"LineString","coordinates":[[2.0,3.0],[3.0,4.0]]}]}'], - 
[mapnik.GeometryType.Polygon,'{"type":"Polygon","coordinates":[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]],[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178.026236,71.415107],[-178.030082,71.413459],[-178.039908,71.40766],[-177.970878,71.39643],[-177.779837,71.333197],[-177.718375,71.305243],[-177.706412,71.3039],[-177.68212,71.304877],[-177.670279,71.301825],[-177.655387,71.293158],[-177.587577,71.285956],[-177.548575,71.294867],[-177.531119,71.296332],[-177.51409,71.293402],[-177.498649,71.284735],[-177.506217,71.268622],[-177.486991,71.258734],[-177.459708,71.249884],[-177.443412,71.237006],[-177.445914,71.222663],[-177.457755,71.209357],[-177.507804,71.173774],[-177.581168,71.147589],[-177.637626,71.117011],[-177.684134,71.110968],[-177.751883,71.092963],[-177.819266,71.084662],[-177.877677,71.052558],[-177.930472,71.041449],[-178.206595,71.038398],[-178.310111,71.013617],[-178.875907,70.981024],[-178.980277,70.95069],[-179.342093,70.908026],[-179.336234,70.911078],[-179.322257,70.921698],[-179.364493,70.930243],[-179.457511,70.915534],[-179.501212,70.919684],[-179.666007,70.965461],[-179.853385,70.979438],[-179.888785,70.993598],[-179.907523,70.996772],[-179.999989,70.992011],[-179.999989,71.024848],[-179.999989,71.058661],[-179.999989,71.126166],[-179.999989,71.187018],[-179.999989,71.224189],[-179.999989,71.27497],[-179.999989,71.312079],[-179.999989,71.356024],[-179.999989,71.410041],[-179.999989,71.487799],[-179.999989,71.536689],[-179.862845,71.538642],[-179.912223,71.555854],[-179.900748,71.558478],[-179.798819,71.569098],[-179.757438,71.583197],[-179.735953,71.586432],[-179.715445,71.583258],[-179.697501,71.577338],[-179.678702,71.573676],[-179.610831,71.585211],[-179.372062,71.569098],[-179.326774,71.555487],[-179.306815,71.557563],[-179.287162,71.562934],[-179.24285,71.569098],[-179.204642,71.583197],[-179.074576,71.600043],[-178.395438,71.539008],[-178.32319,71.518365]]]}'], - 
[mapnik.GeometryType.MultiPolygon,'{"type":"MultiPolygon","coordinates":[[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]]],[[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178.026236,71.415107],[-178.030082,71.413459],[-178.039908,71.40766],[-177.970878,71.39643],[-177.779837,71.333197],[-177.718375,71.305243],[-177.706412,71.3039],[-177.68212,71.304877],[-177.670279,71.301825],[-177.655387,71.293158],[-177.587577,71.285956],[-177.548575,71.294867],[-177.531119,71.296332],[-177.51409,71.293402],[-177.498649,71.284735],[-177.506217,71.268622],[-177.486991,71.258734],[-177.459708,71.249884],[-177.443412,71.237006],[-177.445914,71.222663],[-177.457755,71.209357],[-177.507804,71.173774],[-177.581168,71.147589],[-177.637626,71.117011],[-177.684134,71.110968],[-177.751883,71.092963],[-177.819266,71.084662],[-177.877677,71.052558],[-177.930472,71.041449],[-178.206595,71.038398],[-178.310111,71.013617],[-178.875907,70.981024],[-178.980277,70.95069],[-179.342093,70.908026],[-179.336234,70.911078],[-179.322257,70.921698],[-179.364493,70.930243],[-179.457511,70.915534],[-179.501212,70.919684],[-179.666007,70.965461],[-179.853385,70.979438],[-179.888785,70.993598],[-179.907523,70.996772],[-179.999989,70.992011],[-179.999989,71.024848],[-179.999989,71.058661],[-179.999989,71.126166],[-179.999989,71.187018],[-179.999989,71.224189],[-179.999989,71.27497],[-179.999989,71.312079],[-179.999989,71.356024],[-179.999989,71.410041],[-179.999989,71.487799],[-179.999989,71.536689],[-179.862845,71.538642],[-179.912223,71.555854],[-179.900748,71.558478],[-179.798819,71.569098],[-179.757438,71.583197],[-179.735953,71.586432],[-179.715445,71.583258],[-179.697501,71.577338],[-179.678702,71.573676],[-179.610831,71.585211],[-179.372062,71.569098],[-179.326774,71.555487],[-179.306815,71.557563],[-179.287162,71.562934],[-179.24285,71.569098],[-179.204642,71.583197],[-179.074576,71.600043],[-178.395438,71.539008],[-178.32319,71.518365]]]]}'] + [mapnik.GeometryType.Point, '{"type":"Point","coordinates":[30,10]}'], + [mapnik.GeometryType.Point, '{"type":"Point","coordinates":[30.0,10.0]}'], + [mapnik.GeometryType.Point, '{"type":"Point","coordinates":[30.1,10.1]}'], + [mapnik.GeometryType.LineString, + '{"type":"LineString","coordinates":[[30.0,10.0],[10.0,30.0],[40.0,40.0]]}'], + [mapnik.GeometryType.Polygon, + '{"type":"Polygon","coordinates":[[[30.0,10.0],[10.0,20.0],[20.0,40.0],[40.0,40.0],[30.0,10.0]]]}'], + [mapnik.GeometryType.Polygon, + '{"type":"Polygon","coordinates":[[[35.0,10.0],[10.0,20.0],[15.0,40.0],[45.0,45.0],[35.0,10.0]],[[20.0,30.0],[35.0,35.0],[30.0,20.0],[20.0,30.0]]]}'], + [mapnik.GeometryType.MultiPoint, + '{"type":"MultiPoint","coordinates":[[10.0,40.0],[40.0,30.0],[20.0,20.0],[30.0,10.0]]}'], + [mapnik.GeometryType.MultiLineString, + '{"type":"MultiLineString","coordinates":[[[10.0,10.0],[20.0,20.0],[10.0,40.0]],[[40.0,40.0],[30.0,30.0],[40.0,20.0],[30.0,10.0]]]}'], + [mapnik.GeometryType.MultiPolygon, + '{"type":"MultiPolygon","coordinates":[[[[30.0,20.0],[10.0,40.0],[45.0,40.0],[30.0,20.0]]],[[[15.0,5.0],[40.0,10.0],[10.0,20.0],[5.0,10.0],[15.0,5.0]]]]}'], + [mapnik.GeometryType.MultiPolygon, + 
'{"type":"MultiPolygon","coordinates":[[[[40.0,40.0],[20.0,45.0],[45.0,30.0],[40.0,40.0]]],[[[20.0,35.0],[45.0,20.0],[30.0,5.0],[10.0,10.0],[10.0,30.0],[20.0,35.0]],[[30.0,20.0],[20.0,25.0],[20.0,15.0],[30.0,20.0]]]]}'], + [mapnik.GeometryType.GeometryCollection, + '{"type":"GeometryCollection","geometries":[{"type":"Polygon","coordinates":[[[1.0,1.0],[2.0,1.0],[2.0,2.0],[1.0,2.0],[1.0,1.0]]]},{"type":"Point","coordinates":[2.0,3.0]},{"type":"LineString","coordinates":[[2.0,3.0],[3.0,4.0]]}]}'], + [mapnik.GeometryType.Polygon, '{"type":"Polygon","coordinates":[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]],[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178.026236,71.415107],[-178.030082,71.413459],[-178.039908,71.40766],[-177.970878,71.39643],[-177.779837,71.333197],[-177.718375,71.305243],[-177.706412,71.3039],[-177.68212,71.304877],[-177.670279,71.301825],[-177.655387,71.293158],[-177.587577,71.285956],[-177.548575,71.294867],[-177.531119,71.296332],[-177.51409,71.293402],[-177.498649,71.284735],[-177.506217,71.268622],[-177.486991,71.258734],[-177.459708,71.249884],[-177.443412,71.237006],[-177.445914,71.222663],[-177.457755,71.209357],[-177.507804,71.173774],[-177.581168,71.147589],[-177.637626,71.117011],[-177.684134,71.110968],[-177.751883,71.092963],[-177.819266,71.084662],[-177.877677,71.052558],[-177.930472,71.041449],[-178.206595,71.038398],[-178.310111,71.013617],[-178.875907,70.981024],[-178.980277,70.95069],[-179.342093,70.908026],[-179.336234,70.911078],[-179.322257,70.921698],[-179.364493,70.930243],[-179.457511,70.915534],[-179.501212,70.919684],[-179.666007,70.965461],[-179.853385,70.979438],[-179.888785,70.993598],[-179.907523,70.996772],[-179.999989,70.992011],[-179.999989,71.024848],[-179.999989,71.058661],[-179.999989,71.126166],[-179.999989,71.187018],[-179.999989,71.224189],[-179.999989,71.27497],[-179.999989,71.312079],[-179.999989,71.356024],[-179.999989,71.410041],[-179.999989,71.487799],[-179.999989,71.536689],[-179.862845,71.538642],[-179.912223,71.555854],[-179.900748,71.558478],[-179.798819,71.569098],[-179.757438,71.583197],[-179.735953,71.586432],[-179.715445,71.583258],[-179.697501,71.577338],[-179.678702,71.573676],[-179.610831,71.585211],[-179.372062,71.569098],[-179.326774,71.555487],[-179.306815,71.557563],[-179.287162,71.562934],[-179.24285,71.569098],[-179.204642,71.583197],[-179.074576,71.600043],[-178.395438,71.539008],[-178.32319,71.518365]]]}'], + [mapnik.GeometryType.MultiPolygon, 
'{"type":"MultiPolygon","coordinates":[[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]]],[[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178.026236,71.415107],[-178.030082,71.413459],[-178.039908,71.40766],[-177.970878,71.39643],[-177.779837,71.333197],[-177.718375,71.305243],[-177.706412,71.3039],[-177.68212,71.304877],[-177.670279,71.301825],[-177.655387,71.293158],[-177.587577,71.285956],[-177.548575,71.294867],[-177.531119,71.296332],[-177.51409,71.293402],[-177.498649,71.284735],[-177.506217,71.268622],[-177.486991,71.258734],[-177.459708,71.249884],[-177.443412,71.237006],[-177.445914,71.222663],[-177.457755,71.209357],[-177.507804,71.173774],[-177.581168,71.147589],[-177.637626,71.117011],[-177.684134,71.110968],[-177.751883,71.092963],[-177.819266,71.084662],[-177.877677,71.052558],[-177.930472,71.041449],[-178.206595,71.038398],[-178.310111,71.013617],[-178.875907,70.981024],[-178.980277,70.95069],[-179.342093,70.908026],[-179.336234,70.911078],[-179.322257,70.921698],[-179.364493,70.930243],[-179.457511,70.915534],[-179.501212,70.919684],[-179.666007,70.965461],[-179.853385,70.979438],[-179.888785,70.993598],[-179.907523,70.996772],[-179.999989,70.992011],[-179.999989,71.024848],[-179.999989,71.058661],[-179.999989,71.126166],[-179.999989,71.187018],[-179.999989,71.224189],[-179.999989,71.27497],[-179.999989,71.312079],[-179.999989,71.356024],[-179.999989,71.410041],[-179.999989,71.487799],[-179.999989,71.536689],[-179.862845,71.538642],[-179.912223,71.555854],[-179.900748,71.558478],[-179.798819,71.569098],[-179.757438,71.583197],[-179.735953,71.586432],[-179.715445,71.583258],[-179.697501,71.577338],[-179.678702,71.573676],[-179.610831,71.585211],[-179.372062,71.569098],[-179.326774,71.555487],[-179.306815,71.557563],[-179.287162,71.562934],[-179.24285,71.569098],[-179.204642,71.583197],[-179.074576,71.600043],[-178.395438,71.539008],[-178.32319,71.518365]]]]}'] ] geojson_reversed = [ @@ -65,24 +88,27 @@ def setup(): 
'{"coordinates":[[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]]],[[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178.026236,71.415107],[-178.030082,71.413459],[-178.039908,71.40766],[-177.970878,71.39643],[-177.779837,71.333197],[-177.718375,71.305243],[-177.706412,71.3039],[-177.68212,71.304877],[-177.670279,71.301825],[-177.655387,71.293158],[-177.587577,71.285956],[-177.548575,71.294867],[-177.531119,71.296332],[-177.51409,71.293402],[-177.498649,71.284735],[-177.506217,71.268622],[-177.486991,71.258734],[-177.459708,71.249884],[-177.443412,71.237006],[-177.445914,71.222663],[-177.457755,71.209357],[-177.507804,71.173774],[-177.581168,71.147589],[-177.637626,71.117011],[-177.684134,71.110968],[-177.751883,71.092963],[-177.819266,71.084662],[-177.877677,71.052558],[-177.930472,71.041449],[-178.206595,71.038398],[-178.310111,71.013617],[-178.875907,70.981024],[-178.980277,70.95069],[-179.342093,70.908026],[-179.336234,70.911078],[-179.322257,70.921698],[-179.364493,70.930243],[-179.457511,70.915534],[-179.501212,70.919684],[-179.666007,70.965461],[-179.853385,70.979438],[-179.888785,70.993598],[-179.907523,70.996772],[-179.999989,70.992011],[-179.999989,71.024848],[-179.999989,71.058661],[-179.999989,71.126166],[-179.999989,71.187018],[-179.999989,71.224189],[-179.999989,71.27497],[-179.999989,71.312079],[-179.999989,71.356024],[-179.999989,71.410041],[-179.999989,71.487799],[-179.999989,71.536689],[-179.862845,71.538642],[-179.912223,71.555854],[-179.900748,71.558478],[-179.798819,71.569098],[-179.757438,71.583197],[-179.735953,71.586432],[-179.715445,71.583258],[-179.697501,71.577338],[-179.678702,71.573676],[-179.610831,71.585211],[-179.372062,71.569098],[-179.326774,71.555487],[-179.306815,71.557563],[-179.287162,71.562934],[-179.24285,71.569098],[-179.204642,71.583197],[-179.074576,71.600043],[-178.395438,71.539008],[-178.32319,71.518365]]]],"type":"MultiPolygon"}' ] -geojson_nulls = [ - '{ "type": "Feature", "properties": { }, "geometry": null }', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "Point", "coordinates": [] }}', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [] ] }}', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [] ] ] } }', - '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [], "type": "Point" }}', - '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [ [] ], "type": "LineString" }}', - '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [ [ [] ] ], "type": "Polygon" } }', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPoint", "coordinates": [ [] ] }}', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPoint", "coordinates": [ [],[] ] }}', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiLineString", "coordinates": [ [] ] }}', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiLineString", "coordinates": [ [ [] ] ] }}', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPolygon", "coordinates": [ [] ] }}', - '{ "type": "Feature", "properties": { }, "geometry": { "type": 
"MultiPolygon", "coordinates": [ [ [] ] ] }}', - '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [] ] ] ] }}', +valid_empty_geometries = [ + 'null', # Point can't be empty + '{ "type": "LineString" , "coordinates": []}', + '{ "type": "Polygon" , "coordinates": [[]]}', + '{ "type": "MultiPoint" , "coordinates": []}', + '{ "type": "MultiLineString" , "coordinates": [[]]}', + '{ "type": "MultiPolygon" , "coordinates": [[[]]]}', ] -# valid, but empty wkb's (http://trac.osgeo.org/postgis/wiki/DevWikiEmptyGeometry) +invalid_empty_geometries = [ + '{ "type": "Point" , "coordinates": []}', # Point can't be empty + '{ "type": "LineString" , "coordinates": [[]]}', + '{ "type": "Polygon" , "coordinates": [[[]]]}', + '{ "type": "MultiPoint" , "coordinates": [[]]}', + '{ "type": "MultiLineString" , "coordinates": [[[]]]}', + '{ "type": "MultiPolygon" , "coordinates": [[[[]]]]}', +] + + +# valid, but empty wkb's +# (http://trac.osgeo.org/postgis/wiki/DevWikiEmptyGeometry) empty_wkbs = [ # TODO - this is messed up: round trips as MULTIPOINT EMPTY # template_postgis=# select ST_AsText(ST_GeomFromEWKB(decode(encode(ST_GeomFromText('POINT EMPTY'),'hex'),'hex'))); @@ -91,35 +117,48 @@ def setup(): # MULTIPOINT EMPTY #(1 row) #[ mapnik.GeometryType.Point, "Point EMPTY", '010400000000000000'], - [ mapnik.GeometryType.MultiPoint, "MULTIPOINT EMPTY", '010400000000000000'], - [ mapnik.GeometryType.LineString, "LINESTRING EMPTY", '010200000000000000'], - [ mapnik.GeometryType.LineString, "LINESTRING EMPTY", '010200000000000000' ], - [ mapnik.GeometryType.MultiLineString, "MULTILINESTRING EMPTY", '010500000000000000'], - [ mapnik.GeometryType.Polygon, "Polygon EMPTY", '010300000000000000'], - [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION EMPTY", '010700000000000000'], - [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(LINESTRING EMPTY,LINESTRING EMPTY)", '010700000000000000'], - [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(POINT EMPTY,POINT EMPTY)", '010700000000000000'], + [mapnik.GeometryType.MultiPoint, "MULTIPOINT EMPTY", '010400000000000000'], + [mapnik.GeometryType.LineString, "LINESTRING EMPTY", '010200000000000000'], + [mapnik.GeometryType.LineString, "LINESTRING EMPTY", '010200000000000000'], + [mapnik.GeometryType.MultiLineString, + "MULTILINESTRING EMPTY", + '010500000000000000'], + [mapnik.GeometryType.Polygon, "Polygon EMPTY", '010300000000000000'], + [mapnik.GeometryType.GeometryCollection, + "GEOMETRYCOLLECTION EMPTY", '010700000000000000'], + [mapnik.GeometryType.GeometryCollection, + "GEOMETRYCOLLECTION(LINESTRING EMPTY,LINESTRING EMPTY)", + '010700000000000000'], + [mapnik.GeometryType.GeometryCollection, + "GEOMETRYCOLLECTION(POINT EMPTY,POINT EMPTY)", + '010700000000000000'], ] partially_empty_wkb = [ # TODO - currently this is not considered empty # even though one part is - [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(MULTILINESTRING((10 10,20 20,10 40),(40 40,30 30,40 20,30 10)),LINESTRING EMPTY)", '010700000002000000010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003e400000000000003e40000000000000444000000000000034400000000000003e400000000000002440010200000000000000'], - [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(POINT EMPTY,POINT(0 0))", 
'010700000002000000010400000000000000010100000000000000000000000000000000000000'], - [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(POINT EMPTY,MULTIPOINT(0 0))", '010700000002000000010400000000000000010400000001000000010100000000000000000000000000000000000000'], + [mapnik.GeometryType.GeometryCollection, + "GEOMETRYCOLLECTION(MULTILINESTRING((10 10,20 20,10 40),(40 40,30 30,40 20,30 10)),LINESTRING EMPTY)", + '010700000002000000010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003e400000000000003e40000000000000444000000000000034400000000000003e400000000000002440010200000000000000'], + [mapnik.GeometryType.GeometryCollection, + "GEOMETRYCOLLECTION(POINT EMPTY,POINT(0 0))", + '010700000002000000010400000000000000010100000000000000000000000000000000000000'], + [mapnik.GeometryType.GeometryCollection, + "GEOMETRYCOLLECTION(POINT EMPTY,MULTIPOINT(0 0))", + '010700000002000000010400000000000000010400000001000000010100000000000000000000000000000000000000'], ] # unsupported types unsupported_wkb = [ - [ "MULTIPOLYGON EMPTY", '010600000000000000'], - [ "TRIANGLE EMPTY", '011100000000000000'], - [ "CircularString EMPTY", '010800000000000000'], - [ "CurvePolygon EMPTY", '010A00000000000000'], - [ "CompoundCurve EMPTY", '010900000000000000'], - [ "MultiCurve EMPTY", '010B00000000000000'], - [ "MultiSurface EMPTY", '010C00000000000000'], - [ "PolyhedralSurface EMPTY", '010F00000000000000'], - [ "TIN EMPTY", '011000000000000000'], + ["MULTIPOLYGON EMPTY", '010600000000000000'], + ["TRIANGLE EMPTY", '011100000000000000'], + ["CircularString EMPTY", '010800000000000000'], + ["CurvePolygon EMPTY", '010A00000000000000'], + ["CompoundCurve EMPTY", '010900000000000000'], + ["MultiCurve EMPTY", '010B00000000000000'], + ["MultiSurface EMPTY", '010C00000000000000'], + ["PolyhedralSurface EMPTY", '010F00000000000000'], + ["TIN EMPTY", '011000000000000000'], # TODO - a few bogus inputs # enable if we start range checking to avoid crashing on invalid input? 
# https://github.com/mapnik/mapnik/issues/2236 @@ -128,21 +167,24 @@ def setup(): #[ "0000", '0104' ], ] + def test_path_geo_interface(): geom = mapnik.Geometry.from_wkt('POINT(0 0)') - eq_(geom.__geo_interface__,{u'type': u'Point', u'coordinates': [0, 0]}) + assert geom.__geo_interface__, {u'type': u'Point', u'coordinates': [0 == 0]} + def test_valid_wkb_parsing(): count = 0 for wkb in empty_wkbs: geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2])) - eq_(geom.is_empty(),True) - eq_(geom.type(),wkb[0]) + assert geom.is_empty() == True + assert geom.type() == wkb[0] for wkb in wkts: geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2])) - eq_(geom.is_empty(),False) - eq_(geom.type(),wkb[0]) + assert geom.is_empty() == False + assert geom.type() == wkb[0] + def test_wkb_parsing_error(): count = 0 @@ -150,19 +192,22 @@ def test_wkb_parsing_error(): try: geom = mapnik.Geometry.from_wkb(unhexlify(wkb)) # should not get here - eq_(True,False) + assert True == False except: pass assert True # for partially empty wkbs don't currently look empty right now # since the enclosing container has objects + + def test_empty_wkb_parsing(): count = 0 for wkb in partially_empty_wkb: geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2])) - eq_(geom.type(),wkb[0]) - eq_(geom.is_empty(),False) + assert geom.type() == wkb[0] + assert geom.is_empty() == False + def test_geojson_parsing(): geometries = [] @@ -170,104 +215,118 @@ def test_geojson_parsing(): for j in geojson: count += 1 geometries.append(mapnik.Geometry.from_geojson(j[1])) - eq_(count,len(geometries)) + assert count == len(geometries) + def test_geojson_parsing_reversed(): - for idx,j in enumerate(geojson_reversed): + for idx, j in enumerate(geojson_reversed): g1 = mapnik.Geometry.from_geojson(j) g2 = mapnik.Geometry.from_geojson(geojson[idx][1]) - eq_(g1.to_geojson(), g2.to_geojson()) + assert g1.to_geojson() == g2.to_geojson() # http://geojson.org/geojson-spec.html#positions + + def test_geojson_point_positions(): input_json = '{"type":"Point","coordinates":[30,10]}' geom = mapnik.Geometry.from_geojson(input_json) - eq_(geom.to_geojson(),input_json) + assert geom.to_geojson() == input_json # should ignore all but the first two - geom = mapnik.Geometry.from_geojson('{"type":"Point","coordinates":[30,10,50,50,50,50]}') - eq_(geom.to_geojson(),input_json) + geom = mapnik.Geometry.from_geojson( + '{"type":"Point","coordinates":[30,10,50,50,50,50]}') + assert geom.to_geojson() == input_json + def test_geojson_point_positions2(): input_json = '{"type":"LineString","coordinates":[[30,10],[10,30],[40,40]]}' geom = mapnik.Geometry.from_geojson(input_json) - eq_(geom.to_geojson(),input_json) + assert geom.to_geojson() == input_json # should ignore all but the first two - geom = mapnik.Geometry.from_geojson('{"type":"LineString","coordinates":[[30.0,10.0,0,0,0],[10.0,30.0,0,0,0],[40.0,40.0,0,0,0]]}') - eq_(geom.to_geojson(),input_json) + geom = mapnik.Geometry.from_geojson( + '{"type":"LineString","coordinates":[[30.0,10.0,0,0,0],[10.0,30.0,0,0,0],[40.0,40.0,0,0,0]]}') + assert geom.to_geojson() == input_json -def compare_wkb_from_wkt(wkt,type): + +def compare_wkb_from_wkt(wkt, type): geom = mapnik.Geometry.from_wkt(wkt) - eq_(geom.type(),type) + assert geom.type() == type + -def compare_wkt_to_geojson(idx,wkt,num=None): +def compare_wkt_to_geojson(idx, wkt, num=None): geom = mapnik.Geometry.from_wkt(wkt) # ensure both have same result gj = geom.to_geojson() - eq_(len(gj) > 1,True) + assert len(gj) > 1 == True a = json.loads(gj) e = json.loads(geojson[idx][1]) - 
eq_(a,e) + assert a == e + def test_wkt_simple(): for wkt in wkts: try: geom = mapnik.Geometry.from_wkt(wkt[1]) - eq_(geom.type(),wkt[0]) - except RuntimeError, e: + assert geom.type() == wkt[0] + except RuntimeError as e: raise RuntimeError('%s %s' % (e, wkt)) + def test_wkb_simple(): for wkt in wkts: try: - compare_wkb_from_wkt(wkt[1],wkt[0]) - except RuntimeError, e: + compare_wkb_from_wkt(wkt[1], wkt[0]) + except RuntimeError as e: raise RuntimeError('%s %s' % (e, wkt)) + def test_wkt_to_geojson(): idx = -1 for wkt in wkts: try: idx += 1 - compare_wkt_to_geojson(idx,wkt[1],wkt[0]) - except RuntimeError, e: + compare_wkt_to_geojson(idx, wkt[1], wkt[0]) + except RuntimeError as e: raise RuntimeError('%s %s' % (e, wkt)) + def test_wkt_rounding(): # currently fails because we use output precision of 6 - should we make configurable? https://github.com/mapnik/mapnik/issues/1009 # if precision is set to 15 still fails due to very subtle rounding issues wkt = "POLYGON((7.904185 54.180426,7.89918 54.178168,7.897715 54.182318,7.893565 54.183111,7.890391 54.187567,7.885874 54.19068,7.879893 54.193915,7.894541 54.194647,7.900645 54.19068,7.904185 54.180426))" geom = mapnik.Geometry.from_wkt(wkt) - eq_(geom.type(),mapnik.GeometryType.Polygon) + assert geom.type() == mapnik.GeometryType.Polygon + def test_wkt_collection_flattening(): wkt = 'GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POLYGON((40 40,20 45,45 30,40 40)),POLYGON((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20)),LINESTRING(2 3,3 4))' # currently fails as the MULTIPOLYGON inside will be returned as multiple polygons - not a huge deal - should we worry? #wkt = "GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),MULTIPOLYGON(((40 40,20 45,45 30,40 40)),((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20))),LINESTRING(2 3,3 4))" geom = mapnik.Geometry.from_wkt(wkt) - eq_(geom.type(),mapnik.GeometryType.GeometryCollection) + assert geom.type() == mapnik.GeometryType.GeometryCollection + def test_creating_feature_from_geojson(): json_feat = { - "type": "Feature", - "geometry": {"type": "Point", "coordinates": [-122,48]}, - "properties": {"name": "value"} + "type": "Feature", + "geometry": {"type": "Point", "coordinates": [-122, 48]}, + "properties": {"name": "value"} } ctx = mapnik.Context() - feat = mapnik.Feature.from_geojson(json.dumps(json_feat),ctx) - eq_(feat.id(),1) - eq_(feat['name'],u'value') - -def test_handling_geojson_null_geoms(): - for j in geojson_nulls: - ctx = mapnik.Context() - out_json = mapnik.Feature.from_geojson(j,ctx).to_geojson() - expected = '{"type":"Feature","id":1,"geometry":null,"properties":{}}' - eq_(out_json,expected) - # ensure it round trips - eq_(mapnik.Feature.from_geojson(out_json,ctx).to_geojson(),expected) - - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + feat = mapnik.Feature.from_geojson(json.dumps(json_feat), ctx) + assert feat.id() == 1 + assert feat['name'] == u'value' + + +def test_handling_valid_geojson_empty_geometries(): + for json in valid_empty_geometries: + geom = mapnik.Geometry.from_geojson(json) + out_json = geom.to_geojson() + # check round trip + assert json.replace(" ","") == out_json + + +def test_handling_invalid_geojson_empty_geometries(): + with pytest.raises(RuntimeError): + for json in invalid_empty_geometries: + mapnik.Geometry.from_geojson(json) diff --git a/test/python_tests/grayscale_test.py b/test/python_tests/grayscale_test.py index 2bcf8361b..96e33bd5e 100644 --- 
a/test/python_tests/grayscale_test.py +++ b/test/python_tests/grayscale_test.py @@ -1,13 +1,8 @@ import mapnik -from nose.tools import eq_ -from utilities import run_all def test_grayscale_conversion(): - im = mapnik.Image(2,2) + im = mapnik.Image(2, 2) im.fill(mapnik.Color('white')) im.set_grayscale_to_alpha() - pixel = im.get_pixel(0,0) - eq_((pixel >> 24) & 0xff,255); - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + pixel = im.get_pixel(0, 0) + assert (pixel >> 24) & 0xff == 255 diff --git a/test/python_tests/image_encoding_speed_test.py b/test/python_tests/image_encoding_speed_test.py index 75bbc85af..507da9107 100644 --- a/test/python_tests/image_encoding_speed_test.py +++ b/test/python_tests/image_encoding_speed_test.py @@ -1,14 +1,5 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import os, mapnik from timeit import Timer, time -from utilities import execution_path, run_all - -def setup(): - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) +import mapnik combinations = ['png', 'png8', @@ -21,7 +12,7 @@ def setup(): 'png8:m=h:t=1', 'png8:m=h:t=2', 'png:z=1', - 'png:z=1:t=0', # forces rbg, no a + 'png:z=1:t=0', # forces rbg, no a 'png8:z=1', 'png8:z=1:m=o', 'png8:z=1:m=h', @@ -41,20 +32,19 @@ def setup(): 'png:z=1:s=huff', 'png:z=1:s=rle', 'png8:m=h:g=2.0', - 'png8:m=h:g=1.0', - 'png:e=miniz', - 'png8:e=miniz' - ] + 'png8:m=h:g=1.0' + ] tiles = [ -'blank', -'solid', -'many_colors', -'aerial_24' + 'blank', + 'solid', + 'many_colors', + 'aerial_24' ] iterations = 10 + def do_encoding(): global image @@ -66,59 +56,55 @@ def run(func, im, format, t): global image image = im start = time.time() - set = t.repeat(iterations,1) + set = t.repeat(iterations, 1) elapsed = (time.time() - start) - min_ = min(set)*1000 - avg = (sum(set)/len(set))*1000 + min_ = min(set) * 1000 + avg = (sum(set) / len(set)) * 1000 name = func.__name__ + ' ' + format - results[name] = [min_,avg,elapsed*1000,name,len(func())] + results[name] = [min_, avg, elapsed * 1000, name, len(func())] sortable[name] = [min_] if 'blank' in tiles: def blank(): - return eval('image.tostring("%s")' % c) - blank_im = mapnik.Image(512,512) + return eval('image.to_string("%s")' % c) + blank_im = mapnik.Image(512, 512) for c in combinations: t = Timer(blank) - run(blank,blank_im,c,t) + run(blank, blank_im, c, t) if 'solid' in tiles: def solid(): - return eval('image.tostring("%s")' % c) - solid_im = mapnik.Image(512,512) + return eval('image.to_string("%s")' % c) + solid_im = mapnik.Image(512, 512) solid_im.fill(mapnik.Color("#f2efe9")) for c in combinations: t = Timer(solid) - run(solid,solid_im,c,t) + run(solid, solid_im, c, t) if 'many_colors' in tiles: def many_colors(): - return eval('image.tostring("%s")' % c) + return eval('image.to_string("%s")' % c) # lots of colors: http://tile.osm.org/13/4194/2747.png many_colors_im = mapnik.Image.open('../data/images/13_4194_2747.png') for c in combinations: t = Timer(many_colors) - run(many_colors,many_colors_im,c,t) + run(many_colors, many_colors_im, c, t) if 'aerial_24' in tiles: def aerial_24(): - return eval('image.tostring("%s")' % c) + return eval('image.to_string("%s")' % c) aerial_24_im = mapnik.Image.open('../data/images/12_654_1580.png') for c in combinations: t = Timer(aerial_24) - run(aerial_24,aerial_24_im,c,t) + run(aerial_24, aerial_24_im, c, t) - for key, value in sorted(sortable.iteritems(), key=lambda (k,v): (v,k)): + for key, value in 
sorted(sortable.items(), key=lambda i: (i[1], i[0])): s = results[key] min_ = str(s[0])[:6] avg = str(s[1])[:6] elapsed = str(s[2])[:6] name = s[3] size = s[4] - print 'min: %sms | avg: %sms | total: %sms | len: %s <-- %s' % (min_,avg,elapsed,size,name) - - -if __name__ == "__main__": - setup() - do_encoding() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + print( + 'min: %sms | avg: %sms | total: %sms | len: %s <-- %s' % + (min_, avg, elapsed, size, name)) diff --git a/test/python_tests/image_filters_test.py b/test/python_tests/image_filters_test.py index 269d64ca2..93666aa4a 100644 --- a/test/python_tests/image_filters_test.py +++ b/test/python_tests/image_filters_test.py @@ -1,15 +1,14 @@ -#!/usr/bin/env python - -from nose.tools import eq_ -from utilities import execution_path, run_all -from utilities import side_by_side_image -import os, mapnik -import re +import re, os +import mapnik +import pytest +from .utilities import side_by_side_image, execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield def replace_style(m, name, style): m.remove_style(name) @@ -17,14 +16,14 @@ def replace_style(m, name, style): def test_append(): s = mapnik.Style() - eq_(s.image_filters,'') + assert s.image_filters == '' s.image_filters = 'gray' - eq_(s.image_filters,'gray') + assert s.image_filters == 'gray' s.image_filters = 'sharpen' - eq_(s.image_filters,'sharpen') + assert s.image_filters == 'sharpen' if 'shape' in mapnik.DatasourceCache.plugin_names(): - def test_style_level_image_filter(): + def test_style_level_image_filter(setup): m = mapnik.Map(256, 256) mapnik.load_map(m, '../data/good_maps/style_level_image_filter.xml') m.zoom_all() @@ -49,20 +48,22 @@ def test_style_level_image_filter(): mapnik.render(m, im) actual = '/tmp/mapnik-style-image-filter-' + filename + '.png' expected = 'images/style-image-filter/' + filename + '.png' - im.save(actual,"png32") + im.save(actual, "png32") if not os.path.exists(expected) or os.environ.get('UPDATE'): - print 'generating expected test image: %s' % expected - im.save(expected,'png32') + print('generating expected test image: %s' % expected) + im.save(expected, 'png32') expected_im = mapnik.Image.open(expected) # compare them - if im.tostring('png32') == expected_im.tostring('png32'): + if im.to_string('png32') == expected_im.to_string('png32'): successes.append(name) else: - fails.append('failed comparing actual (%s) and expected(%s)' % (actual,'tests/python_tests/'+ expected)) + fails.append( + 'failed comparing actual (%s) and expected(%s)' % + (actual, expected)) fail_im = side_by_side_image(expected_im, im) - fail_im.save('/tmp/mapnik-style-image-filter-' + filename + '.fail.png','png32') - eq_(len(fails), 0, '\n'+'\n'.join(fails)) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + fail_im.save( + '/tmp/mapnik-style-image-filter-' + + filename + + '.fail.png', + 'png32') + assert len(fails) == 0, '\n' + '\n'.join(fails) diff --git a/test/python_tests/image_test.py b/test/python_tests/image_test.py index 189f8beb5..3a72cceb5 100644 --- a/test/python_tests/image_test.py +++ b/test/python_tests/image_test.py @@ -1,346 +1,377 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- +import os +import mapnik +import pytest -import os, mapnik -from nose.tools import eq_,raises, assert_almost_equal -from utilities import execution_path, run_all, 
get_unique_colors +from .utilities import READ_FLAGS, get_unique_colors, execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield -def test_type(): +def test_type(setup): im = mapnik.Image(256, 256) - eq_(im.get_type(), mapnik.ImageType.rgba8) + assert im.get_type() == mapnik.ImageType.rgba8 im = mapnik.Image(256, 256, mapnik.ImageType.gray8) - eq_(im.get_type(), mapnik.ImageType.gray8) + assert im.get_type() == mapnik.ImageType.gray8 + def test_image_premultiply(): - im = mapnik.Image(256,256) - eq_(im.premultiplied(),False) + im = mapnik.Image(256, 256) + assert im.premultiplied() == False # Premultiply should return true that it worked - eq_(im.premultiply(), True) - eq_(im.premultiplied(),True) + assert im.premultiply() == True + assert im.premultiplied() == True # Premultipling again should return false as nothing should happen - eq_(im.premultiply(), False) - eq_(im.premultiplied(),True) + assert im.premultiply() == False + assert im.premultiplied() == True # Demultiply should return true that it worked - eq_(im.demultiply(), True) - eq_(im.premultiplied(),False) + assert im.demultiply() == True + assert im.premultiplied() == False # Demultiply again should not work and return false as it did nothing - eq_(im.demultiply(), False) - eq_(im.premultiplied(),False) + assert im.demultiply() == False + assert im.premultiplied() == False + def test_image_premultiply_values(): - im = mapnik.Image(256,256) + im = mapnik.Image(256, 256) im.fill(mapnik.Color(16, 33, 255, 128)) im.premultiply() - c = im.get_pixel(0,0, True) - eq_(c.r, 8) - eq_(c.g, 17) - eq_(c.b, 128) - eq_(c.a, 128) + c = im.get_pixel_color(0, 0) + assert c.r == 8 + assert c.g == 17 + assert c.b == 128 + assert c.a == 128 im.demultiply() - # Do to the nature of this operation the result will not be exactly the same - c = im.get_pixel(0,0,True) - eq_(c.r,15) - eq_(c.g,33) - eq_(c.b,255) - eq_(c.a,128) + # Do to the nature of this operation the result will not be exactly the + # same + c = im.get_pixel_color(0, 0) + assert c.r == 15 + assert c.g == 33 + assert c.b == 255 + assert c.a == 128 + def test_apply_opacity(): - im = mapnik.Image(4,4) - im.fill(mapnik.Color(128,128,128,128)) - im.apply_opacity(0.75); - c = im.get_pixel(0,0,True) - eq_(c.r,128) - eq_(c.g,128) - eq_(c.b,128) - eq_(c.a,96) + im = mapnik.Image(4, 4) + im.fill(mapnik.Color(128, 128, 128, 128)) + im.apply_opacity(0.75) + c = im.get_pixel_color(0, 0) + assert c.r == 128 + assert c.g == 128 + assert c.b == 128 + assert c.a == 96 + def test_background(): - im = mapnik.Image(256,256) - eq_(im.premultiplied(), False) - im.fill(mapnik.Color(32,64,125,128)) - eq_(im.premultiplied(), False) - c = im.get_pixel(0,0,True) - eq_(c.get_premultiplied(), False) - eq_(c.r,32) - eq_(c.g,64) - eq_(c.b,125) - eq_(c.a,128) + im = mapnik.Image(256, 256) + assert im.premultiplied() == False + im.fill(mapnik.Color(32, 64, 125, 128)) + assert im.premultiplied() == False + c = im.get_pixel_color(0, 0) + assert c.get_premultiplied() == False + assert c.r == 32 + assert c.g == 64 + assert c.b == 125 + assert c.a == 128 # Now again with a premultiplied alpha - im.fill(mapnik.Color(32,64,125,128,True)) - eq_(im.premultiplied(), True) - c = im.get_pixel(0,0,True) - eq_(c.get_premultiplied(), True) - eq_(c.r,32) - eq_(c.g,64) - eq_(c.b,125) - eq_(c.a,128) + im.fill(mapnik.Color(32, 64, 125, 128, True)) + assert im.premultiplied() == True + c = 
im.get_pixel_color(0, 0) + assert c.get_premultiplied() == True + assert c.r == 32 + assert c.g == 64 + assert c.b == 125 + assert c.a == 128 + def test_set_and_get_pixel(): # Create an image that is not premultiplied - im = mapnik.Image(256,256) - c0 = mapnik.Color(16,33,255,128) - c0_pre = mapnik.Color(16,33,255,128, True) - im.set_pixel(0,0,c0) - im.set_pixel(1,1,c0_pre) + im = mapnik.Image(256, 256) + c0 = mapnik.Color(16, 33, 255, 128) + c0_pre = mapnik.Color(16, 33, 255, 128, True) + im.set_pixel(0, 0, c0) + im.set_pixel(1, 1, c0_pre) # No differences for non premultiplied pixels - c1_int = mapnik.Color(im.get_pixel(0,0)) - eq_(c0.r, c1_int.r) - eq_(c0.g, c1_int.g) - eq_(c0.b, c1_int.b) - eq_(c0.a, c1_int.a) - c1 = im.get_pixel(0,0,True) - eq_(c0.r, c1.r) - eq_(c0.g, c1.g) - eq_(c0.b, c1.b) - eq_(c0.a, c1.a) + c1_int = mapnik.Color(im.get_pixel(0, 0)) + assert c0.r == c1_int.r + assert c0.g == c1_int.g + assert c0.b == c1_int.b + assert c0.a == c1_int.a + c1 = im.get_pixel_color(0, 0) + assert c0.r == c1.r + assert c0.g == c1.g + assert c0.b == c1.b + assert c0.a == c1.a # The premultiplied Color should be demultiplied before being applied. c0_pre.demultiply() - c1_int = mapnik.Color(im.get_pixel(1,1)) - eq_(c0_pre.r, c1_int.r) - eq_(c0_pre.g, c1_int.g) - eq_(c0_pre.b, c1_int.b) - eq_(c0_pre.a, c1_int.a) - c1 = im.get_pixel(1,1,True) - eq_(c0_pre.r, c1.r) - eq_(c0_pre.g, c1.g) - eq_(c0_pre.b, c1.b) - eq_(c0_pre.a, c1.a) - + c1_int = mapnik.Color(im.get_pixel(1, 1)) + assert c0_pre.r == c1_int.r + assert c0_pre.g == c1_int.g + assert c0_pre.b == c1_int.b + assert c0_pre.a == c1_int.a + c1 = im.get_pixel_color(1, 1) + assert c0_pre.r == c1.r + assert c0_pre.g == c1.g + assert c0_pre.b == c1.b + assert c0_pre.a == c1.a + # Now create a new image that is premultiplied - im = mapnik.Image(256,256, mapnik.ImageType.rgba8, True, True) - c0 = mapnik.Color(16,33,255,128) - c0_pre = mapnik.Color(16,33,255,128, True) - im.set_pixel(0,0,c0) - im.set_pixel(1,1,c0_pre) - # It should have put pixels that are the same as premultiplied so premultiply c0 + im = mapnik.Image(256, 256, mapnik.ImageType.rgba8, True, True) + c0 = mapnik.Color(16, 33, 255, 128) + c0_pre = mapnik.Color(16, 33, 255, 128, True) + im.set_pixel(0, 0, c0) + im.set_pixel(1, 1, c0_pre) + # It should have put pixels that are the same as premultiplied so + # premultiply c0 c0.premultiply() - c1_int = mapnik.Color(im.get_pixel(0,0)) - eq_(c0.r, c1_int.r) - eq_(c0.g, c1_int.g) - eq_(c0.b, c1_int.b) - eq_(c0.a, c1_int.a) - c1 = im.get_pixel(0,0,True) - eq_(c0.r, c1.r) - eq_(c0.g, c1.g) - eq_(c0.b, c1.b) - eq_(c0.a, c1.a) + c1_int = mapnik.Color(im.get_pixel(0, 0)) + assert c0.r == c1_int.r + assert c0.g == c1_int.g + assert c0.b == c1_int.b + assert c0.a == c1_int.a + c1 = im.get_pixel_color(0, 0) + assert c0.r == c1.r + assert c0.g == c1.g + assert c0.b == c1.b + assert c0.a == c1.a # The premultiplied Color should be the same though - c1_int = mapnik.Color(im.get_pixel(1,1)) - eq_(c0_pre.r, c1_int.r) - eq_(c0_pre.g, c1_int.g) - eq_(c0_pre.b, c1_int.b) - eq_(c0_pre.a, c1_int.a) - c1 = im.get_pixel(1,1,True) - eq_(c0_pre.r, c1.r) - eq_(c0_pre.g, c1.g) - eq_(c0_pre.b, c1.b) - eq_(c0_pre.a, c1.a) + c1_int = mapnik.Color(im.get_pixel(1, 1)) + assert c0_pre.r == c1_int.r + assert c0_pre.g == c1_int.g + assert c0_pre.b == c1_int.b + assert c0_pre.a == c1_int.a + c1 = im.get_pixel_color(1, 1) + assert c0_pre.r == c1.r + assert c0_pre.g == c1.g + assert c0_pre.b == c1.b + assert c0_pre.a == c1.a + def test_pixel_gray8(): - im = 
mapnik.Image(4,4,mapnik.ImageType.gray8) + im = mapnik.Image(4, 4, mapnik.ImageType.gray8) val_list = range(20) for v in val_list: - im.set_pixel(0,0, v) - eq_(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - eq_(im.get_pixel(0,0), 0) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == v + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == 0 + def test_pixel_gray8s(): - im = mapnik.Image(4,4,mapnik.ImageType.gray8s) + im = mapnik.Image(4, 4, mapnik.ImageType.gray8s) val_list = range(20) for v in val_list: - im.set_pixel(0,0, v) - eq_(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - eq_(im.get_pixel(0,0), -v) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == v + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == -v + def test_pixel_gray16(): - im = mapnik.Image(4,4,mapnik.ImageType.gray16) + im = mapnik.Image(4, 4, mapnik.ImageType.gray16) val_list = range(20) for v in val_list: - im.set_pixel(0,0, v) - eq_(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - eq_(im.get_pixel(0,0), 0) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == v + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == 0 + def test_pixel_gray16s(): - im = mapnik.Image(4,4,mapnik.ImageType.gray16s) + im = mapnik.Image(4, 4, mapnik.ImageType.gray16s) val_list = range(20) for v in val_list: - im.set_pixel(0,0, v) - eq_(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - eq_(im.get_pixel(0,0), -v) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == v + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == -v + def test_pixel_gray32(): - im = mapnik.Image(4,4,mapnik.ImageType.gray32) + im = mapnik.Image(4, 4, mapnik.ImageType.gray32) val_list = range(20) for v in val_list: - im.set_pixel(0,0, v) - eq_(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - eq_(im.get_pixel(0,0), 0) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == v + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == 0 + def test_pixel_gray32s(): - im = mapnik.Image(4,4,mapnik.ImageType.gray32s) + im = mapnik.Image(4, 4, mapnik.ImageType.gray32s) val_list = range(20) for v in val_list: - im.set_pixel(0,0, v) - eq_(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - eq_(im.get_pixel(0,0), -v) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == v + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == -v + def test_pixel_gray64(): - im = mapnik.Image(4,4,mapnik.ImageType.gray64) + im = mapnik.Image(4, 4, mapnik.ImageType.gray64) val_list = range(20) for v in val_list: - im.set_pixel(0,0, v) - eq_(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - eq_(im.get_pixel(0,0), 0) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == v + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == 0 + def test_pixel_gray64s(): - im = mapnik.Image(4,4,mapnik.ImageType.gray64s) + im = mapnik.Image(4, 4, mapnik.ImageType.gray64s) val_list = range(20) for v in val_list: - im.set_pixel(0,0, v) - eq_(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - eq_(im.get_pixel(0,0), -v) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == v + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == -v + def test_pixel_floats(): - im = mapnik.Image(4,4,mapnik.ImageType.gray32f) + im = mapnik.Image(4, 4, mapnik.ImageType.gray32f) val_list = [0.9, 0.99, 0.999, 0.9999, 0.99999, 1, 1.0001, 1.001, 1.01, 1.1] for v in val_list: - im.set_pixel(0,0, v) - assert_almost_equal(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - assert_almost_equal(im.get_pixel(0,0), -v) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == pytest.approx(v) + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == 
pytest.approx(-v) + def test_pixel_doubles(): - im = mapnik.Image(4,4,mapnik.ImageType.gray64f) + im = mapnik.Image(4, 4, mapnik.ImageType.gray64f) val_list = [0.9, 0.99, 0.999, 0.9999, 0.99999, 1, 1.0001, 1.001, 1.01, 1.1] for v in val_list: - im.set_pixel(0,0, v) - assert_almost_equal(im.get_pixel(0,0), v) - im.set_pixel(0,0, -v) - assert_almost_equal(im.get_pixel(0,0), -v) + im.set_pixel(0, 0, v) + assert im.get_pixel(0, 0) == pytest.approx(v) + im.set_pixel(0, 0, -v) + assert im.get_pixel(0, 0) == pytest.approx(-v) + def test_pixel_overflow(): - im = mapnik.Image(4,4,mapnik.ImageType.gray8) - im.set_pixel(0,0,256) - eq_(im.get_pixel(0,0),255) + im = mapnik.Image(4, 4, mapnik.ImageType.gray8) + im.set_pixel(0, 0, 256) + assert im.get_pixel(0, 0) == 255 + def test_pixel_underflow(): - im = mapnik.Image(4,4,mapnik.ImageType.gray8) - im.set_pixel(0,0,-1) - eq_(im.get_pixel(0,0),0) - im = mapnik.Image(4,4,mapnik.ImageType.gray16) - im.set_pixel(0,0,-1) - eq_(im.get_pixel(0,0),0) - -@raises(IndexError) + im = mapnik.Image(4, 4, mapnik.ImageType.gray8) + im.set_pixel(0, 0, -1) + assert im.get_pixel(0, 0) == 0 + im = mapnik.Image(4, 4, mapnik.ImageType.gray16) + im.set_pixel(0, 0, -1) + assert im.get_pixel(0, 0) == 0 + + def test_set_pixel_out_of_range_1(): - im = mapnik.Image(4,4) - c = mapnik.Color('blue') - im.set_pixel(5,5,c) + with pytest.raises(IndexError): + im = mapnik.Image(4, 4) + c = mapnik.Color('blue') + im.set_pixel(5, 5, c) + -@raises(OverflowError) def test_set_pixel_out_of_range_2(): - im = mapnik.Image(4,4) - c = mapnik.Color('blue') - im.set_pixel(-1,1,c) + with pytest.raises(IndexError): + im = mapnik.Image(4, 4) + c = mapnik.Color('blue') + im.set_pixel(-1, 1, c) + -@raises(IndexError) def test_get_pixel_out_of_range_1(): - im = mapnik.Image(4,4) - c = im.get_pixel(5,5) + with pytest.raises(IndexError): + im = mapnik.Image(4, 4) + c = im.get_pixel(5, 5) + -@raises(OverflowError) def test_get_pixel_out_of_range_2(): - im = mapnik.Image(4,4) - c = im.get_pixel(-1,1) + with pytest.raises(IndexError): + im = mapnik.Image(4, 4) + c = im.get_pixel(-1, 1) + -@raises(IndexError) def test_get_pixel_color_out_of_range_1(): - im = mapnik.Image(4,4) - c = im.get_pixel(5,5,True) + with pytest.raises(IndexError): + im = mapnik.Image(4, 4) + c = im.get_pixel_color(5, 5) + -@raises(OverflowError) def test_get_pixel_color_out_of_range_2(): - im = mapnik.Image(4,4) - c = im.get_pixel(-1,1,True) - + with pytest.raises(IndexError): + im = mapnik.Image(4, 4) + c = im.get_pixel_color(-1, 1) + + def test_set_color_to_alpha(): - im = mapnik.Image(256,256) + im = mapnik.Image(256, 256) im.fill(mapnik.Color('rgba(12,12,12,255)')) - eq_(get_unique_colors(im), ['rgba(12,12,12,255)']) + assert get_unique_colors(im) == ['rgba(12,12,12,255)'] im.set_color_to_alpha(mapnik.Color('rgba(12,12,12,0)')) - eq_(get_unique_colors(im), ['rgba(0,0,0,0)']) + assert get_unique_colors(im) == ['rgba(0,0,0,0)'] + -@raises(RuntimeError) def test_negative_image_dimensions(): - # TODO - this may have regressed in https://github.com/mapnik/mapnik/commit/4f3521ac24b61fc8ae8fd344a16dc3a5fdf15af7 - im = mapnik.Image(-40,40) - # should not get here - eq_(im.width(),0) - eq_(im.height(),0) + with pytest.raises(RuntimeError): + # TODO - this may have regressed in + # https://github.com/mapnik/mapnik/commit/4f3521ac24b61fc8ae8fd344a16dc3a5fdf15af7 + im = mapnik.Image(-40, 40) + # should not get here + assert im.width() == 0 + assert im.height() == 0 + def test_jpeg_round_trip(): filepath = '/tmp/mapnik-jpeg-io.jpeg' - im =
mapnik.Image(255,267) + im = mapnik.Image(255, 267) im.fill(mapnik.Color('rgba(1,2,3,.5)')) - im.save(filepath,'jpeg') + im.save(filepath, 'jpeg') im2 = mapnik.Image.open(filepath) - im3 = mapnik.Image.fromstring(open(filepath,'r').read()) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(im.width(),im3.width()) - eq_(im.height(),im3.height()) - eq_(len(im.tostring()),len(im2.tostring())) - eq_(len(im.tostring('jpeg')),len(im2.tostring('jpeg'))) - eq_(len(im.tostring()),len(im3.tostring())) - eq_(len(im.tostring('jpeg')),len(im3.tostring('jpeg'))) + with open(filepath, READ_FLAGS) as f: + im3 = mapnik.Image.from_string(f.read()) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert im.width() == im3.width() + assert im.height() == im3.height() + assert len(im.to_string()) == len(im2.to_string()) + assert len(im.to_string('jpeg')) == len(im2.to_string('jpeg')) + assert len(im.to_string()) == len(im3.to_string()) + assert len(im.to_string('jpeg')) == len(im3.to_string('jpeg')) + def test_png_round_trip(): filepath = '/tmp/mapnik-png-io.png' - im = mapnik.Image(255,267) + im = mapnik.Image(255, 267) im.fill(mapnik.Color('rgba(1,2,3,.5)')) - im.save(filepath,'png') + im.save(filepath, 'png') im2 = mapnik.Image.open(filepath) - im3 = mapnik.Image.fromstring(open(filepath,'r').read()) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(im.width(),im3.width()) - eq_(im.height(),im3.height()) - eq_(len(im.tostring()),len(im2.tostring())) - eq_(len(im.tostring('png')),len(im2.tostring('png'))) - eq_(len(im.tostring('png8')),len(im2.tostring('png8'))) - eq_(len(im.tostring()),len(im3.tostring())) - eq_(len(im.tostring('png')),len(im3.tostring('png'))) - eq_(len(im.tostring('png8')),len(im3.tostring('png8'))) + with open(filepath, READ_FLAGS) as f: + im3 = mapnik.Image.from_string(f.read()) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert im.width() == im3.width() + assert im.height() == im3.height() + assert len(im.to_string()) == len(im2.to_string()) + assert len(im.to_string('png')) == len(im2.to_string('png')) + assert len(im.to_string('png8')) == len(im2.to_string('png8')) + assert len(im.to_string()) == len(im3.to_string()) + assert len(im.to_string('png')) == len(im3.to_string('png')) + assert len(im.to_string('png8')) == len(im3.to_string('png8')) + def test_image_open_from_string(): filepath = '../data/images/dummy.png' im1 = mapnik.Image.open(filepath) - im2 = mapnik.Image.fromstring(open(filepath,'rb').read()) - eq_(im1.width(),im2.width()) - length = len(im1.tostring()) - eq_(length,len(im2.tostring())) - eq_(len(mapnik.Image.fromstring(im1.tostring('png')).tostring()),length) - eq_(len(mapnik.Image.fromstring(im1.tostring('jpeg')).tostring()),length) - eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('png'))).tostring()),length) - eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('jpeg'))).tostring()),length) + with open(filepath, READ_FLAGS) as f: + im2 = mapnik.Image.from_string(f.read()) + assert im1.width() == im2.width() + length = len(im1.to_string()) + assert length == len(im2.to_string()) + assert len(mapnik.Image.from_string(im1.to_string('png')).to_string()) == length + assert len(mapnik.Image.from_string(im1.to_string('jpeg')).to_string()) == length + assert len(mapnik.Image.from_memoryview(memoryview(im1.to_string('png'))).to_string()) == length + assert len(mapnik.Image.from_memoryview(memoryview(im1.to_string('jpeg'))).to_string()) == length # TODO - 
https://github.com/mapnik/mapnik/issues/1831 - eq_(len(mapnik.Image.fromstring(im1.tostring('tiff')).tostring()),length) - eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('tiff'))).tostring()),length) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert len(mapnik.Image.from_string(im1.to_string('tiff')).to_string()) == length + assert len(mapnik.Image.from_memoryview(memoryview(im1.to_string('tiff'))).to_string()) == length diff --git a/test/python_tests/image_tiff_test.py b/test/python_tests/image_tiff_test.py index e0535d0ae..492238acc 100644 --- a/test/python_tests/image_tiff_test.py +++ b/test/python_tests/image_tiff_test.py @@ -1,335 +1,357 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import os, mapnik +import os import hashlib -from nose.tools import eq_, assert_not_equal -from utilities import execution_path, run_all - -def hashstr(var): - return hashlib.md5(var).hexdigest() +import mapnik +import pytest +from .utilities import READ_FLAGS, execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield + +def hashstr(var): + return hashlib.md5(var).hexdigest() -def test_tiff_round_trip_scanline(): +def test_tiff_round_trip_scanline(setup): filepath = '/tmp/mapnik-tiff-io-scanline.tiff' - im = mapnik.Image(255,267) + im = mapnik.Image(255, 267) im.fill(mapnik.Color('rgba(12,255,128,.5)')) - org_str = hashstr(im.tostring()) - im.save(filepath,'tiff:method=scanline') + org_str = hashstr(im.to_string()) + im.save(filepath, 'tiff:method=scanline') im2 = mapnik.Image.open(filepath) - im3 = mapnik.Image.fromstring(open(filepath,'r').read()) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(im.width(),im3.width()) - eq_(im.height(),im3.height()) - eq_(hashstr(im.tostring()), org_str) - # This won't be the same the first time around because the im is not premultiplied and im2 is - assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring())) - assert_not_equal(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline'))) + with open(filepath, READ_FLAGS) as f: + im3 = mapnik.Image.from_string(f.read()) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert im.width() == im3.width() + assert im.height() == im3.height() + assert hashstr(im.to_string()) == org_str + # This won't be the same the first time around because the im is not + # premultiplied and im2 is + assert not hashstr(im.to_string()) == hashstr(im2.to_string()) + assert not hashstr(im.to_string('tiff:method=scanline')) == hashstr(im2.to_string('tiff:method=scanline')) # Now premultiply im.premultiply() - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline'))) - eq_(hashstr(im2.tostring()),hashstr(im3.tostring())) - eq_(hashstr(im2.tostring('tiff:method=scanline')),hashstr(im3.tostring('tiff:method=scanline'))) + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=scanline')) == hashstr(im2.to_string('tiff:method=scanline')) + assert hashstr(im2.to_string()) == hashstr(im3.to_string()) + assert hashstr(im2.to_string('tiff:method=scanline')) == hashstr(im3.to_string('tiff:method=scanline')) + def test_tiff_round_trip_stripped(): filepath = '/tmp/mapnik-tiff-io-stripped.tiff' - im = mapnik.Image(255,267) + 
im = mapnik.Image(255, 267) im.fill(mapnik.Color('rgba(12,255,128,.5)')) - org_str = hashstr(im.tostring()) - im.save(filepath,'tiff:method=stripped') + org_str = hashstr(im.to_string()) + im.save(filepath, 'tiff:method=stripped') im2 = mapnik.Image.open(filepath) - im2.save('/tmp/mapnik-tiff-io-stripped2.tiff','tiff:method=stripped') - im3 = mapnik.Image.fromstring(open(filepath,'r').read()) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(im.width(),im3.width()) - eq_(im.height(),im3.height()) + im2.save('/tmp/mapnik-tiff-io-stripped2.tiff', 'tiff:method=stripped') + with open(filepath, READ_FLAGS) as f: + im3 = mapnik.Image.from_string(f.read()) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert im.width() == im3.width() + assert im.height() == im3.height() # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the # difference in tags. - assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring())) - assert_not_equal(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped'))) + assert not hashstr(im.to_string()) == hashstr(im2.to_string()) + assert not hashstr(im.to_string('tiff:method=stripped')) == hashstr(im2.to_string('tiff:method=stripped')) # Now if we premultiply they will be exactly the same im.premultiply() - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped'))) - eq_(hashstr(im2.tostring()),hashstr(im3.tostring())) - # Both of these started out premultiplied, so this round trip should be exactly the same! - eq_(hashstr(im2.tostring('tiff:method=stripped')),hashstr(im3.tostring('tiff:method=stripped'))) + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=stripped')) == hashstr(im2.to_string('tiff:method=stripped')) + assert hashstr(im2.to_string()) == hashstr(im3.to_string()) + # Both of these started out premultiplied, so this round trip should be + # exactly the same! 
+ assert hashstr(im2.to_string('tiff:method=stripped')) == hashstr(im3.to_string('tiff:method=stripped')) + def test_tiff_round_trip_rows_stripped(): filepath = '/tmp/mapnik-tiff-io-rows_stripped.tiff' filepath2 = '/tmp/mapnik-tiff-io-rows_stripped2.tiff' - im = mapnik.Image(255,267) + im = mapnik.Image(255, 267) im.fill(mapnik.Color('rgba(12,255,128,.5)')) - c = im.get_pixel(0,0,True) - eq_(c.r, 12) - eq_(c.g, 255) - eq_(c.b, 128) - eq_(c.a, 128) - eq_(c.get_premultiplied(), False) - im.save(filepath,'tiff:method=stripped:rows_per_strip=8') + c = im.get_pixel_color(0, 0) + assert c.r == 12 + assert c.g == 255 + assert c.b == 128 + assert c.a == 128 + assert c.get_premultiplied() == False + im.save(filepath, 'tiff:method=stripped:rows_per_strip=8') im2 = mapnik.Image.open(filepath) - c2 = im2.get_pixel(0,0,True) - eq_(c2.r, 6) - eq_(c2.g, 128) - eq_(c2.b, 64) - eq_(c2.a, 128) - eq_(c2.get_premultiplied(), True) - im2.save(filepath2,'tiff:method=stripped:rows_per_strip=8') - im3 = mapnik.Image.fromstring(open(filepath,'r').read()) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(im.width(),im3.width()) - eq_(im.height(),im3.height()) + c2 = im2.get_pixel_color(0, 0) + assert c2.r == 6 + assert c2.g == 128 + assert c2.b == 64 + assert c2.a == 128 + assert c2.get_premultiplied() == True + im2.save(filepath2, 'tiff:method=stripped:rows_per_strip=8') + with open(filepath, READ_FLAGS) as f: + im3 = mapnik.Image.from_string(f.read()) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert im.width() == im3.width() + assert im.height() == im3.height() # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the - # difference in tags. - assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring())) - assert_not_equal(hashstr(im.tostring('tiff:method=stripped:rows_per_strip=8')),hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8'))) + # difference in tags. + assert not hashstr(im.to_string()) == hashstr(im2.to_string()) + assert not hashstr(im.to_string('tiff:method=stripped:rows_per_strip=8')) == hashstr( + im2.to_string('tiff:method=stripped:rows_per_strip=8')) # Now premultiply the first image and they will be the same! im.premultiply() - eq_(hashstr(im.tostring('tiff:method=stripped:rows_per_strip=8')),hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8'))) - eq_(hashstr(im2.tostring()),hashstr(im3.tostring())) - # Both of these started out premultiplied, so this round trip should be exactly the same! - eq_(hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')),hashstr(im3.tostring('tiff:method=stripped:rows_per_strip=8'))) + assert hashstr(im.to_string('tiff:method=stripped:rows_per_strip=8')) == hashstr(im2.to_string('tiff:method=stripped:rows_per_strip=8')) + assert hashstr(im2.to_string()) == hashstr(im3.to_string()) + # Both of these started out premultiplied, so this round trip should be + # exactly the same! 
+ assert hashstr(im2.to_string('tiff:method=stripped:rows_per_strip=8')) == hashstr(im3.to_string('tiff:method=stripped:rows_per_strip=8')) + def test_tiff_round_trip_buffered_tiled(): filepath = '/tmp/mapnik-tiff-io-buffered-tiled.tiff' filepath2 = '/tmp/mapnik-tiff-io-buffered-tiled2.tiff' filepath3 = '/tmp/mapnik-tiff-io-buffered-tiled3.tiff' - im = mapnik.Image(255,267) + im = mapnik.Image(255, 267) im.fill(mapnik.Color('rgba(33,255,128,.5)')) - c = im.get_pixel(0,0,True) - eq_(c.r, 33) - eq_(c.g, 255) - eq_(c.b, 128) - eq_(c.a, 128) - eq_(c.get_premultiplied(), False) - im.save(filepath,'tiff:method=tiled:tile_width=32:tile_height=32') + c = im.get_pixel_color(0, 0) + assert c.r == 33 + assert c.g == 255 + assert c.b == 128 + assert c.a == 128 + assert not c.get_premultiplied() + im.save(filepath, 'tiff:method=tiled:tile_width=32:tile_height=32') im2 = mapnik.Image.open(filepath) - c2 = im2.get_pixel(0,0,True) - eq_(c2.r, 17) - eq_(c2.g, 128) - eq_(c2.b, 64) - eq_(c2.a, 128) - eq_(c2.get_premultiplied(), True) - im3 = mapnik.Image.fromstring(open(filepath,'r').read()) + c2 = im2.get_pixel_color(0, 0) + assert c2.r == 17 + assert c2.g == 128 + assert c2.b == 64 + assert c2.a == 128 + assert c2.get_premultiplied() + with open(filepath, READ_FLAGS) as f: + im3 = mapnik.Image.from_string(f.read()) im2.save(filepath2, 'tiff:method=tiled:tile_width=32:tile_height=32') im3.save(filepath3, 'tiff:method=tiled:tile_width=32:tile_height=32') - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(im.width(),im3.width()) - eq_(im.height(),im3.height()) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert im.width() == im3.width() + assert im.height() == im3.height() # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the # difference in tags. - assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring())) - assert_not_equal(hashstr(im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32'))) + assert not hashstr(im.to_string()) == hashstr(im2.to_string()) + assert not hashstr(im.to_string('tiff:method=tiled:tile_width=32:tile_height=32')) == hashstr( + im2.to_string('tiff:method=tiled:tile_width=32:tile_height=32')) # Now premultiply the first image and they should be the same im.premultiply() - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32'))) - eq_(hashstr(im2.tostring()),hashstr(im3.tostring())) - # Both of these started out premultiplied, so this round trip should be exactly the same! - eq_(hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),hashstr(im3.tostring('tiff:method=tiled:tile_width=32:tile_height=32'))) + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=tiled:tile_width=32:tile_height=32')) == hashstr(im2.to_string('tiff:method=tiled:tile_width=32:tile_height=32')) + assert hashstr(im2.to_string()) == hashstr(im3.to_string()) + # Both of these started out premultiplied, so this round trip should be + # exactly the same! 
+ assert hashstr(im2.to_string('tiff:method=tiled:tile_width=32:tile_height=32')) == hashstr(im3.to_string('tiff:method=tiled:tile_width=32:tile_height=32')) + def test_tiff_round_trip_tiled(): filepath = '/tmp/mapnik-tiff-io-tiled.tiff' - im = mapnik.Image(256,256) + im = mapnik.Image(256, 256) im.fill(mapnik.Color('rgba(1,255,128,.5)')) - im.save(filepath,'tiff:method=tiled') + im.save(filepath, 'tiff:method=tiled') im2 = mapnik.Image.open(filepath) - im3 = mapnik.Image.fromstring(open(filepath,'r').read()) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(im.width(),im3.width()) - eq_(im.height(),im3.height()) + with open(filepath, READ_FLAGS) as f: + im3 = mapnik.Image.from_string(f.read()) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert im.width() == im3.width() + assert im.height() == im3.height() # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the # difference in tags. - assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring())) - assert_not_equal(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled'))) + assert not hashstr(im.to_string()) == hashstr(im2.to_string()) + assert not hashstr(im.to_string('tiff:method=tiled')) == hashstr(im2.to_string('tiff:method=tiled')) # Now premultiply the first image and they will be exactly the same. im.premultiply() - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled'))) - eq_(hashstr(im2.tostring()),hashstr(im3.tostring())) - # Both of these started out premultiplied, so this round trip should be exactly the same! - eq_(hashstr(im2.tostring('tiff:method=tiled')),hashstr(im3.tostring('tiff:method=tiled'))) + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=tiled')) == hashstr(im2.to_string('tiff:method=tiled')) + assert hashstr(im2.to_string()) == hashstr(im3.to_string()) + # Both of these started out premultiplied, so this round trip should be + # exactly the same! 
+ assert hashstr(im2.to_string('tiff:method=tiled')) == hashstr(im3.to_string('tiff:method=tiled')) def test_tiff_rgb8_compare(): filepath1 = '../data/tiff/ndvi_256x256_rgb8_striped.tif' filepath2 = '/tmp/mapnik-tiff-rgb8.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff') + im.save(filepath2, 'tiff') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff')),hashstr(im2.tostring('tiff'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff')) == hashstr(im2.to_string('tiff')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.rgba8).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.rgba8).to_string("tiff")) + def test_tiff_rgba8_compare_scanline(): filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif' filepath2 = '/tmp/mapnik-tiff-rgba8-scanline.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=scanline') + im.save(filepath2, 'tiff:method=scanline') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=scanline')) == hashstr(im2.to_string('tiff:method=scanline')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.rgba8).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.rgba8).to_string("tiff")) + def test_tiff_rgba8_compare_stripped(): filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif' filepath2 = '/tmp/mapnik-tiff-rgba8-stripped.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=stripped') + im.save(filepath2, 'tiff:method=stripped') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=stripped')) == hashstr(im2.to_string('tiff:method=stripped')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.rgba8).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.rgba8).to_string("tiff")) + def test_tiff_rgba8_compare_tiled(): filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif' - filepath2 = '/tmp/mapnik-tiff-rgba8-stripped.tiff' + filepath2 = '/tmp/mapnik-tiff-rgba8-tiled.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=tiled') + im.save(filepath2, 'tiff:method=tiled') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - 
eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=tiled')) == hashstr(im2.to_string('tiff:method=tiled')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.rgba8).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.rgba8).to_string("tiff")) + def test_tiff_gray8_compare_scanline(): filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif' filepath2 = '/tmp/mapnik-tiff-gray8-scanline.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=scanline') + im.save(filepath2, 'tiff:method=scanline') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=scanline')) == hashstr(im2.to_string('tiff:method=scanline')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray8).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray8).to_string("tiff")) def test_tiff_gray8_compare_stripped(): filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif' filepath2 = '/tmp/mapnik-tiff-gray8-stripped.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=stripped') + im.save(filepath2, 'tiff:method=stripped') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=stripped')) == hashstr(im2.to_string('tiff:method=stripped')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray8).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray8).to_string("tiff")) + def test_tiff_gray8_compare_tiled(): filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif' filepath2 = '/tmp/mapnik-tiff-gray8-tiled.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=tiled') + im.save(filepath2, 'tiff:method=tiled') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=tiled')) == hashstr(im2.to_string('tiff:method=tiled')) # should not be a blank image - 
eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray8).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray8).to_string("tiff")) + def test_tiff_gray16_compare_scanline(): filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif' filepath2 = '/tmp/mapnik-tiff-gray16-scanline.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=scanline') + im.save(filepath2, 'tiff:method=scanline') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=scanline')) == hashstr(im2.to_string('tiff:method=scanline')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray16).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray16).to_string("tiff")) def test_tiff_gray16_compare_stripped(): filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif' filepath2 = '/tmp/mapnik-tiff-gray16-stripped.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=stripped') + im.save(filepath2, 'tiff:method=stripped') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=stripped')) == hashstr(im2.to_string('tiff:method=stripped')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray16).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray16).to_string("tiff")) + def test_tiff_gray16_compare_tiled(): filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif' filepath2 = '/tmp/mapnik-tiff-gray16-tiled.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=tiled') + im.save(filepath2, 'tiff:method=tiled') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=tiled')) == hashstr(im2.to_string('tiff:method=tiled')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray16).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray16).to_string("tiff")) + def test_tiff_gray32f_compare_scanline(): filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif' filepath2 = 
'/tmp/mapnik-tiff-gray32f-scanline.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=scanline') + im.save(filepath2, 'tiff:method=scanline') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=scanline')) == hashstr(im2.to_string('tiff:method=scanline')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray32f).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray32f).to_string("tiff")) + def test_tiff_gray32f_compare_stripped(): filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif' filepath2 = '/tmp/mapnik-tiff-gray32f-stripped.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=stripped') + im.save(filepath2, 'tiff:method=stripped') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=stripped')) == hashstr(im2.to_string('tiff:method=stripped')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray32f).tostring("tiff")),True) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray32f).to_string("tiff")) + def test_tiff_gray32f_compare_tiled(): filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif' filepath2 = '/tmp/mapnik-tiff-gray32f-tiled.tiff' im = mapnik.Image.open(filepath1) - im.save(filepath2,'tiff:method=tiled') + im.save(filepath2, 'tiff:method=tiled') im2 = mapnik.Image.open(filepath2) - eq_(im.width(),im2.width()) - eq_(im.height(),im2.height()) - eq_(hashstr(im.tostring()),hashstr(im2.tostring())) - eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled'))) + assert im.width() == im2.width() + assert im.height() == im2.height() + assert hashstr(im.to_string()) == hashstr(im2.to_string()) + assert hashstr(im.to_string('tiff:method=tiled')) == hashstr(im2.to_string('tiff:method=tiled')) # should not be a blank image - eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray32f).tostring("tiff")),True) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert hashstr(im.to_string("tiff")) != hashstr(mapnik.Image(im.width(), im.height(), mapnik.ImageType.gray32f).to_string("tiff")) diff --git a/test/python_tests/images/pycairo/cairo-cairo-expected.pdf b/test/python_tests/images/pycairo/cairo-cairo-expected.pdf index 220a9b210..2d2d0dad9 100644 Binary files a/test/python_tests/images/pycairo/cairo-cairo-expected.pdf and b/test/python_tests/images/pycairo/cairo-cairo-expected.pdf differ diff --git a/test/python_tests/images/pycairo/cairo-cairo-expected.png 
b/test/python_tests/images/pycairo/cairo-cairo-expected.png index 3a99f5e73..687440c24 100644 Binary files a/test/python_tests/images/pycairo/cairo-cairo-expected.png and b/test/python_tests/images/pycairo/cairo-cairo-expected.png differ diff --git a/test/python_tests/images/pycairo/cairo-cairo-expected.svg b/test/python_tests/images/pycairo/cairo-cairo-expected.svg index 18d73432d..c120f58ed 100644 --- a/test/python_tests/images/pycairo/cairo-cairo-expected.svg +++ b/test/python_tests/images/pycairo/cairo-cairo-expected.svg @@ -3,45 +3,48 @@ - + - + - + - + - + - + - + - + + + + - + - - - - - - - - - + + + + + + + + + - + diff --git a/test/python_tests/images/pycairo/pdf-printing-expected.pdf b/test/python_tests/images/pycairo/pdf-printing-expected.pdf new file mode 100644 index 000000000..bf300ba05 Binary files /dev/null and b/test/python_tests/images/pycairo/pdf-printing-expected.pdf differ diff --git a/test/python_tests/images/style-comp-op/color.png b/test/python_tests/images/style-comp-op/color.png index 81dae902b..662b4728d 100644 Binary files a/test/python_tests/images/style-comp-op/color.png and b/test/python_tests/images/style-comp-op/color.png differ diff --git a/test/python_tests/images/style-image-filter/agg-stack-blur22.png b/test/python_tests/images/style-image-filter/agg-stack-blur22.png index 1d1b7ca0c..b8226452e 100644 Binary files a/test/python_tests/images/style-image-filter/agg-stack-blur22.png and b/test/python_tests/images/style-image-filter/agg-stack-blur22.png differ diff --git a/test/python_tests/images/style-image-filter/blur.png b/test/python_tests/images/style-image-filter/blur.png index ec6fc7f7d..29b72bfc3 100644 Binary files a/test/python_tests/images/style-image-filter/blur.png and b/test/python_tests/images/style-image-filter/blur.png differ diff --git a/test/python_tests/images/style-image-filter/edge-detect.png b/test/python_tests/images/style-image-filter/edge-detect.png index 74eff4dfc..2c7cb1466 100644 Binary files a/test/python_tests/images/style-image-filter/edge-detect.png and b/test/python_tests/images/style-image-filter/edge-detect.png differ diff --git a/test/python_tests/images/style-image-filter/emboss.png b/test/python_tests/images/style-image-filter/emboss.png index bf74d99d0..d0cb71c39 100644 Binary files a/test/python_tests/images/style-image-filter/emboss.png and b/test/python_tests/images/style-image-filter/emboss.png differ diff --git a/test/python_tests/images/style-image-filter/gray.png b/test/python_tests/images/style-image-filter/gray.png index 7ee05f520..7ed3982ec 100644 Binary files a/test/python_tests/images/style-image-filter/gray.png and b/test/python_tests/images/style-image-filter/gray.png differ diff --git a/test/python_tests/images/style-image-filter/invert.png b/test/python_tests/images/style-image-filter/invert.png index 52bcf950c..08d3d22f9 100644 Binary files a/test/python_tests/images/style-image-filter/invert.png and b/test/python_tests/images/style-image-filter/invert.png differ diff --git a/test/python_tests/images/style-image-filter/none.png b/test/python_tests/images/style-image-filter/none.png index 245966de7..55d3d42cb 100644 Binary files a/test/python_tests/images/style-image-filter/none.png and b/test/python_tests/images/style-image-filter/none.png differ diff --git a/test/python_tests/images/style-image-filter/sharpen.png b/test/python_tests/images/style-image-filter/sharpen.png index 8599186a0..592b9f650 100644 Binary files a/test/python_tests/images/style-image-filter/sharpen.png and 
b/test/python_tests/images/style-image-filter/sharpen.png differ diff --git a/test/python_tests/images/style-image-filter/sobel.png b/test/python_tests/images/style-image-filter/sobel.png index c1b709253..f7061b378 100644 Binary files a/test/python_tests/images/style-image-filter/sobel.png and b/test/python_tests/images/style-image-filter/sobel.png differ diff --git a/test/python_tests/images/style-image-filter/x-gradient.png b/test/python_tests/images/style-image-filter/x-gradient.png index fdc5f74d9..125bb0169 100644 Binary files a/test/python_tests/images/style-image-filter/x-gradient.png and b/test/python_tests/images/style-image-filter/x-gradient.png differ diff --git a/test/python_tests/images/style-image-filter/y-gradient.png b/test/python_tests/images/style-image-filter/y-gradient.png index b84a491ce..88b0be877 100644 Binary files a/test/python_tests/images/style-image-filter/y-gradient.png and b/test/python_tests/images/style-image-filter/y-gradient.png differ diff --git a/test/python_tests/images/support/dataraster_coloring.png b/test/python_tests/images/support/dataraster_coloring.png index da3cac4b1..c42d1dbfe 100644 Binary files a/test/python_tests/images/support/dataraster_coloring.png and b/test/python_tests/images/support/dataraster_coloring.png differ diff --git a/test/python_tests/images/support/encoding-opts/aerial_rgba-webp+method=0.webp b/test/python_tests/images/support/encoding-opts/aerial_rgba-webp+method=0.webp index f0f3838ca..209cf47d5 100644 Binary files a/test/python_tests/images/support/encoding-opts/aerial_rgba-webp+method=0.webp and b/test/python_tests/images/support/encoding-opts/aerial_rgba-webp+method=0.webp differ diff --git a/test/python_tests/images/support/encoding-opts/aerial_rgba-webp+method=6.webp b/test/python_tests/images/support/encoding-opts/aerial_rgba-webp+method=6.webp index be253e265..56c90f2ef 100644 Binary files a/test/python_tests/images/support/encoding-opts/aerial_rgba-webp+method=6.webp and b/test/python_tests/images/support/encoding-opts/aerial_rgba-webp+method=6.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+alpha_filtering=2.webp b/test/python_tests/images/support/encoding-opts/blank-webp+alpha_filtering=2.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+alpha_filtering=2.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+alpha_filtering=2.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+alpha_quality=50.webp b/test/python_tests/images/support/encoding-opts/blank-webp+alpha_quality=50.webp index 10cea1cdf..7a73677cc 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+alpha_quality=50.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+alpha_quality=50.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+autofilter=0.webp b/test/python_tests/images/support/encoding-opts/blank-webp+autofilter=0.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+autofilter=0.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+autofilter=0.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+filter_sharpness=4.webp b/test/python_tests/images/support/encoding-opts/blank-webp+filter_sharpness=4.webp index 932a4dec0..d95307bf0 100644 Binary files 
a/test/python_tests/images/support/encoding-opts/blank-webp+filter_sharpness=4.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+filter_sharpness=4.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+filter_strength=50.webp b/test/python_tests/images/support/encoding-opts/blank-webp+filter_strength=50.webp index 2e65b9b85..053ef81eb 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+filter_strength=50.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+filter_strength=50.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+filter_type=1+autofilter=1.webp b/test/python_tests/images/support/encoding-opts/blank-webp+filter_type=1+autofilter=1.webp index 7e3bd76b5..1375c7e9f 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+filter_type=1+autofilter=1.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+filter_type=1+autofilter=1.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+method=0.webp b/test/python_tests/images/support/encoding-opts/blank-webp+method=0.webp index 5c6492427..ed98c5905 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+method=0.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+method=0.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+method=6.webp b/test/python_tests/images/support/encoding-opts/blank-webp+method=6.webp index ef84f4c33..05413d25b 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+method=6.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+method=6.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+partition_limit=50.webp b/test/python_tests/images/support/encoding-opts/blank-webp+partition_limit=50.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+partition_limit=50.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+partition_limit=50.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+partitions=3.webp b/test/python_tests/images/support/encoding-opts/blank-webp+partitions=3.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+partitions=3.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+partitions=3.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+pass=10.webp b/test/python_tests/images/support/encoding-opts/blank-webp+pass=10.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+pass=10.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+pass=10.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+preprocessing=1.webp b/test/python_tests/images/support/encoding-opts/blank-webp+preprocessing=1.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+preprocessing=1.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+preprocessing=1.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+quality=64.webp b/test/python_tests/images/support/encoding-opts/blank-webp+quality=64.webp index 0eb26aa1a..5e0ba88e1 100644 Binary files 
a/test/python_tests/images/support/encoding-opts/blank-webp+quality=64.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+quality=64.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+segments=3.webp b/test/python_tests/images/support/encoding-opts/blank-webp+segments=3.webp index af3082b71..039cf25f4 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+segments=3.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+segments=3.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+sns_strength=50.webp b/test/python_tests/images/support/encoding-opts/blank-webp+sns_strength=50.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+sns_strength=50.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+sns_strength=50.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+target_PSNR=.5.webp b/test/python_tests/images/support/encoding-opts/blank-webp+target_PSNR=.5.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+target_PSNR=.5.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+target_PSNR=.5.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp+target_size=100.webp b/test/python_tests/images/support/encoding-opts/blank-webp+target_size=100.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp+target_size=100.webp and b/test/python_tests/images/support/encoding-opts/blank-webp+target_size=100.webp differ diff --git a/test/python_tests/images/support/encoding-opts/blank-webp.webp b/test/python_tests/images/support/encoding-opts/blank-webp.webp index a7369dcb9..02090121e 100644 Binary files a/test/python_tests/images/support/encoding-opts/blank-webp.webp and b/test/python_tests/images/support/encoding-opts/blank-webp.webp differ diff --git a/test/python_tests/images/support/encoding-opts/solid-webp+method=6.webp b/test/python_tests/images/support/encoding-opts/solid-webp+method=6.webp index 5a76594f6..02e010694 100644 Binary files a/test/python_tests/images/support/encoding-opts/solid-webp+method=6.webp and b/test/python_tests/images/support/encoding-opts/solid-webp+method=6.webp differ diff --git a/test/python_tests/images/support/mapnik-marker-ellipse-render1.png b/test/python_tests/images/support/mapnik-marker-ellipse-render1.png index 7854c5621..e7f12b494 100644 Binary files a/test/python_tests/images/support/mapnik-marker-ellipse-render1.png and b/test/python_tests/images/support/mapnik-marker-ellipse-render1.png differ diff --git a/test/python_tests/images/support/mapnik-marker-ellipse-render2.png b/test/python_tests/images/support/mapnik-marker-ellipse-render2.png index c2a4963c3..d1d0d2215 100644 Binary files a/test/python_tests/images/support/mapnik-marker-ellipse-render2.png and b/test/python_tests/images/support/mapnik-marker-ellipse-render2.png differ diff --git a/test/python_tests/images/support/marker-text-line-scale-factor-0.1.png b/test/python_tests/images/support/marker-text-line-scale-factor-0.1.png index 306424dfc..286fa9caa 100644 Binary files a/test/python_tests/images/support/marker-text-line-scale-factor-0.1.png and b/test/python_tests/images/support/marker-text-line-scale-factor-0.1.png differ diff --git a/test/python_tests/images/support/marker-text-line-scale-factor-0.899.png 
b/test/python_tests/images/support/marker-text-line-scale-factor-0.899.png index cb2c651f8..aac9cb89d 100644 Binary files a/test/python_tests/images/support/marker-text-line-scale-factor-0.899.png and b/test/python_tests/images/support/marker-text-line-scale-factor-0.899.png differ diff --git a/test/python_tests/images/support/marker-text-line-scale-factor-1.5.png b/test/python_tests/images/support/marker-text-line-scale-factor-1.5.png index a1b34a357..12f283702 100644 Binary files a/test/python_tests/images/support/marker-text-line-scale-factor-1.5.png and b/test/python_tests/images/support/marker-text-line-scale-factor-1.5.png differ diff --git a/test/python_tests/images/support/marker-text-line-scale-factor-1.png b/test/python_tests/images/support/marker-text-line-scale-factor-1.png index c86c5fa4e..08bdf04bb 100644 Binary files a/test/python_tests/images/support/marker-text-line-scale-factor-1.png and b/test/python_tests/images/support/marker-text-line-scale-factor-1.png differ diff --git a/test/python_tests/images/support/marker-text-line-scale-factor-10.png b/test/python_tests/images/support/marker-text-line-scale-factor-10.png index 8a7842f9e..bd25f5852 100644 Binary files a/test/python_tests/images/support/marker-text-line-scale-factor-10.png and b/test/python_tests/images/support/marker-text-line-scale-factor-10.png differ diff --git a/test/python_tests/images/support/marker-text-line-scale-factor-1e-05.png b/test/python_tests/images/support/marker-text-line-scale-factor-1e-05.png index 0f368308a..e9f9e7f84 100644 Binary files a/test/python_tests/images/support/marker-text-line-scale-factor-1e-05.png and b/test/python_tests/images/support/marker-text-line-scale-factor-1e-05.png differ diff --git a/test/python_tests/images/support/marker-text-line-scale-factor-2.png b/test/python_tests/images/support/marker-text-line-scale-factor-2.png index e3ad67f99..058ed4a4f 100644 Binary files a/test/python_tests/images/support/marker-text-line-scale-factor-2.png and b/test/python_tests/images/support/marker-text-line-scale-factor-2.png differ diff --git a/test/python_tests/images/support/marker-text-line-scale-factor-5.png b/test/python_tests/images/support/marker-text-line-scale-factor-5.png index 2be5f2df3..7122e287b 100644 Binary files a/test/python_tests/images/support/marker-text-line-scale-factor-5.png and b/test/python_tests/images/support/marker-text-line-scale-factor-5.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_16bsi_subquery-16BSI-135.png b/test/python_tests/images/support/pgraster/data_subquery-data_16bsi_subquery-16BSI-135.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_16bsi_subquery-16BSI-135.png and b/test/python_tests/images/support/pgraster/data_subquery-data_16bsi_subquery-16BSI-135.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_16bui_subquery-16BUI-126.png b/test/python_tests/images/support/pgraster/data_subquery-data_16bui_subquery-16BUI-126.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_16bui_subquery-16BUI-126.png and b/test/python_tests/images/support/pgraster/data_subquery-data_16bui_subquery-16BUI-126.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_2bui_subquery-2BUI-3.png b/test/python_tests/images/support/pgraster/data_subquery-data_2bui_subquery-2BUI-3.png index e6fad0d0b..38be9e295 100644 Binary files 
a/test/python_tests/images/support/pgraster/data_subquery-data_2bui_subquery-2BUI-3.png and b/test/python_tests/images/support/pgraster/data_subquery-data_2bui_subquery-2BUI-3.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_32bf_subquery-32BF-450.png b/test/python_tests/images/support/pgraster/data_subquery-data_32bf_subquery-32BF-450.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_32bf_subquery-32BF-450.png and b/test/python_tests/images/support/pgraster/data_subquery-data_32bf_subquery-32BF-450.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_32bsi_subquery-32BSI-264.png b/test/python_tests/images/support/pgraster/data_subquery-data_32bsi_subquery-32BSI-264.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_32bsi_subquery-32BSI-264.png and b/test/python_tests/images/support/pgraster/data_subquery-data_32bsi_subquery-32BSI-264.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_32bui_subquery-32BUI-255.png b/test/python_tests/images/support/pgraster/data_subquery-data_32bui_subquery-32BUI-255.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_32bui_subquery-32BUI-255.png and b/test/python_tests/images/support/pgraster/data_subquery-data_32bui_subquery-32BUI-255.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_4bui_subquery-4BUI-15.png b/test/python_tests/images/support/pgraster/data_subquery-data_4bui_subquery-4BUI-15.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_4bui_subquery-4BUI-15.png and b/test/python_tests/images/support/pgraster/data_subquery-data_4bui_subquery-4BUI-15.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_64bf_subquery-64BF-3072.png b/test/python_tests/images/support/pgraster/data_subquery-data_64bf_subquery-64BF-3072.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_64bf_subquery-64BF-3072.png and b/test/python_tests/images/support/pgraster/data_subquery-data_64bf_subquery-64BF-3072.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_8bsi_subquery-8BSI-69.png b/test/python_tests/images/support/pgraster/data_subquery-data_8bsi_subquery-8BSI-69.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_8bsi_subquery-8BSI-69.png and b/test/python_tests/images/support/pgraster/data_subquery-data_8bsi_subquery-8BSI-69.png differ diff --git a/test/python_tests/images/support/pgraster/data_subquery-data_8bui_subquery-8BUI-63.png b/test/python_tests/images/support/pgraster/data_subquery-data_8bui_subquery-8BUI-63.png index e6fad0d0b..38be9e295 100644 Binary files a/test/python_tests/images/support/pgraster/data_subquery-data_8bui_subquery-8BUI-63.png and b/test/python_tests/images/support/pgraster/data_subquery-data_8bui_subquery-8BUI-63.png differ diff --git a/test/python_tests/images/support/pgraster/rgba_8bui-nodataedge-rgb_8bui C T_64x64 Cl--1-box1.png b/test/python_tests/images/support/pgraster/rgba_8bui-nodataedge-rgb_8bui C T_64x64 Cl--1-box1.png index cae620513..2d8149d1d 100644 Binary files a/test/python_tests/images/support/pgraster/rgba_8bui-nodataedge-rgb_8bui C T_64x64 Cl--1-box1.png 
and b/test/python_tests/images/support/pgraster/rgba_8bui-nodataedge-rgb_8bui C T_64x64 Cl--1-box1.png differ diff --git a/test/python_tests/images/support/pgraster/rgba_8bui-nodataedge-rgb_8bui C T_64x64--0-box1.png b/test/python_tests/images/support/pgraster/rgba_8bui-nodataedge-rgb_8bui C T_64x64--0-box1.png index cae620513..2d8149d1d 100644 Binary files a/test/python_tests/images/support/pgraster/rgba_8bui-nodataedge-rgb_8bui C T_64x64--0-box1.png and b/test/python_tests/images/support/pgraster/rgba_8bui-nodataedge-rgb_8bui C T_64x64--0-box1.png differ diff --git a/test/python_tests/images/support/raster_warping.png b/test/python_tests/images/support/raster_warping.png index 7a6dea7c0..83f68213c 100644 Binary files a/test/python_tests/images/support/raster_warping.png and b/test/python_tests/images/support/raster_warping.png differ diff --git a/test/python_tests/images/support/raster_warping_does_not_overclip_source.png b/test/python_tests/images/support/raster_warping_does_not_overclip_source.png index 0d6ccc5ab..179ea2d0a 100644 Binary files a/test/python_tests/images/support/raster_warping_does_not_overclip_source.png and b/test/python_tests/images/support/raster_warping_does_not_overclip_source.png differ diff --git a/test/python_tests/images/support/transparency/white0.webp b/test/python_tests/images/support/transparency/white0.webp index f276b81ee..2f0baac52 100644 Binary files a/test/python_tests/images/support/transparency/white0.webp and b/test/python_tests/images/support/transparency/white0.webp differ diff --git a/test/python_tests/introspection_test.py b/test/python_tests/introspection_test.py index afb1cc2c9..ccd9a5208 100644 --- a/test/python_tests/introspection_test.py +++ b/test/python_tests/introspection_test.py @@ -1,40 +1,40 @@ -#!/usr/bin/env python - import os -from nose.tools import eq_ -from utilities import execution_path, run_all - import mapnik +import pytest +from .utilities import execution_path + +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield -def test_introspect_symbolizers(): +def test_introspect_symbolizers(setup): # create a symbolizer p = mapnik.PointSymbolizer() p.file = "../data/images/dummy.png" p.allow_overlap = True p.opacity = 0.5 - eq_(p.allow_overlap, True) - eq_(p.opacity, 0.5) - eq_(p.filename,'../data/images/dummy.png') + assert p.allow_overlap == True + assert p.opacity == 0.5 + assert str(p.file) == '../data/images/dummy.png' # make sure the defaults # are what we think they are - eq_(p.allow_overlap, True) - eq_(p.opacity,0.5) - eq_(p.filename,'../data/images/dummy.png') + assert p.allow_overlap == True + assert p.opacity == 0.5 + assert str(p.file) == '../data/images/dummy.png' # contruct objects to hold it r = mapnik.Rule() - r.symbols.append(p) + r.symbolizers.append(p) s = mapnik.Style() s.rules.append(r) - m = mapnik.Map(0,0) - m.append_style('s',s) + m = mapnik.Map(0, 0) + m.append_style('s', s) # try to figure out what is # in the map and make sure @@ -42,20 +42,16 @@ def test_introspect_symbolizers(): s2 = m.find_style('s') rules = s2.rules - eq_(len(rules),1) + assert len(rules) == 1 r2 = rules[0] - syms = r2.symbols - eq_(len(syms),1) + syms = r2.symbolizers + assert len(syms) == 1 - ## TODO here, we can do... + # TODO here, we can do... 
sym = syms[0] p2 = sym.extract() - assert isinstance(p2,mapnik.PointSymbolizer) - - eq_(p2.allow_overlap, True) - eq_(p2.opacity, 0.5) - eq_(p2.filename,'../data/images/dummy.png') + assert isinstance(p2, mapnik.PointSymbolizer) -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert p2.allow_overlap == True + assert p2.opacity == 0.5 + assert str(p2.file) == '../data/images/dummy.png' diff --git a/test/python_tests/json_feature_properties_test.py b/test/python_tests/json_feature_properties_test.py index 47f2428d4..7e7bb9a25 100644 --- a/test/python_tests/json_feature_properties_test.py +++ b/test/python_tests/json_feature_properties_test.py @@ -1,80 +1,76 @@ -#encoding: utf8 - -from nose.tools import eq_ import mapnik -from utilities import run_all try: import json except ImportError: import simplejson as json chars = [ - { - "name":"single_quote", - "test": "string with ' quote", - "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \' quote"}}' - }, - { - "name":"escaped_single_quote", - "test":"string with \' quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \' quote"}}' - }, - { - "name":"double_quote", - "test":'string with " quote', - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\" quote"}}' - }, - { - "name":"double_quote2", - "test":"string with \" quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\" quote"}}' - }, - { - "name":"reverse_solidus", # backslash - "test":"string with \\ quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\\ quote"}}' - }, - { - "name":"solidus", # forward slash - "test":"string with / quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with / quote"}}' - }, - { - "name":"backspace", - "test":"string with \b quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\b quote"}}' - }, - { - "name":"formfeed", - "test":"string with \f quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\f quote"}}' - }, - { - "name":"newline", - "test":"string with \n quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\n quote"}}' - }, - { - "name":"carriage_return", - "test":"string with \r quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\r quote"}}' - }, - { - "name":"horiztonal_tab", - "test":"string with \t quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\t quote"}}' - }, - # remainder are c++ reserved, but not json - { - "name":"vert_tab", - "test":"string with \v quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\u000b quote"}}' - }, - { - "name":"alert", - "test":"string with \a quote", - "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \u0007 quote"}}' - } + { + "name": "single_quote", + "test": "string with ' quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \' quote"}}' + }, + { + "name": "escaped_single_quote", + "test": "string with \' quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \' quote"}}' + }, + { + "name": "double_quote", + "test": 'string with " quote', + "json": 
'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\" quote"}}' + }, + { + "name": "double_quote2", + "test": "string with \" quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\" quote"}}' + }, + # { + # "name": "reverse_solidus", # backslash + # "test": "string with \\ quote", + # "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\\ quote"}}' + # }, + { + "name": "solidus", # forward slash + "test": "string with / quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with / quote"}}' + }, + { + "name": "backspace", + "test": "string with \b quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\b quote"}}' + }, + { + "name": "formfeed", + "test": "string with \f quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\f quote"}}' + }, + { + "name": "newline", + "test": "string with \n quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\n quote"}}' + }, + { + "name": "carriage_return", + "test": "string with \r quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\r quote"}}' + }, + { + "name": "horiztonal_tab", + "test": "string with \t quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\t quote"}}' + }, + # remainder are c++ reserved, but not json + { + "name": "vert_tab", + "test": "string with \v quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\u000b quote"}}' + }, + { + "name": "alert", + "test": "string with \a quote", + "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\u0007 quote"}}' + } ] ctx = mapnik.Context() @@ -82,21 +78,18 @@ def test_char_escaping(): for char in chars: - feat = mapnik.Feature(ctx,1) + feat = mapnik.Feature(ctx, 1) expected = char['test'] feat["name"] = expected - eq_(feat["name"],expected) + assert feat["name"] == expected # confirm the python json module # is working as we would expect pyjson2 = json.loads(char['json']) - eq_(pyjson2['properties']['name'],expected) + assert pyjson2['properties']['name'] == expected # confirm our behavior is the same as python json module # for the original string geojson_feat_string = feat.to_geojson() - eq_(geojson_feat_string,char['json'],"Mapnik's json escaping is not to spec: actual(%s) and expected(%s) for %s" % (geojson_feat_string,char['json'],char['name'])) + assert geojson_feat_string == char['json'], "Mapnik's json escaping is not to spec: actual(%s) and expected(%s) for %s" % (geojson_feat_string, char['json'], char['name']) # and the round tripped string pyjson = json.loads(geojson_feat_string) - eq_(pyjson['properties']['name'],expected) - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert pyjson['properties']['name'] == expected diff --git a/test/python_tests/layer_buffer_size_test.py b/test/python_tests/layer_buffer_size_test.py index 83765a73f..c56701fa4 100644 --- a/test/python_tests/layer_buffer_size_test.py +++ b/test/python_tests/layer_buffer_size_test.py @@ -1,35 +1,32 @@ -#coding=utf8 import os import mapnik -from utilities import execution_path, run_all -from nose.tools import eq_ +import pytest +from .utilities import execution_path + +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we 
run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if 'sqlite' in mapnik.DatasourceCache.plugin_names(): # the negative buffer on the layer should # override the postive map buffer leading # only one point to be rendered in the map - def test_layer_buffer_size_1(): - m = mapnik.Map(512,512) - eq_(m.buffer_size,0) - mapnik.load_map(m,'../data/good_maps/layer_buffer_size_reduction.xml') - eq_(m.buffer_size,256) - eq_(m.layers[0].buffer_size,-150) + def test_layer_buffer_size_1(setup): + m = mapnik.Map(512, 512) + assert m.buffer_size == 0 + mapnik.load_map(m, '../data/good_maps/layer_buffer_size_reduction.xml') + assert m.buffer_size == 256 + assert m.layers[0].buffer_size == -150 m.zoom_all() - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) actual = '/tmp/mapnik-layer-buffer-size.png' expected = 'images/support/mapnik-layer-buffer-size.png' - im.save(actual,"png32") + im.save(actual, "png32") expected_im = mapnik.Image.open(expected) - eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected)) - - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert im.to_string('png32') == expected_im.to_string('png32'),'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/' + expected) diff --git a/test/python_tests/layer_modification_test.py b/test/python_tests/layer_modification_test.py index 7517ac2ac..bf01d634c 100644 --- a/test/python_tests/layer_modification_test.py +++ b/test/python_tests/layer_modification_test.py @@ -1,16 +1,17 @@ -#!/usr/bin/env python - import os -from nose.tools import eq_ -from utilities import execution_path, run_all import mapnik +import pytest + +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield -def test_adding_datasource_to_layer(): +def test_adding_datasource_to_layer(setup): map_string = ''' @@ -34,9 +35,9 @@ def test_adding_datasource_to_layer(): mapnik.load_map_from_string(m, map_string) # validate it loaded fine - eq_(m.layers[0].styles[0],'world_borders_style') - eq_(m.layers[0].styles[1],'point_style') - eq_(len(m.layers),1) + assert m.layers[0].styles[0] == 'world_borders_style' + assert m.layers[0].styles[1] == 'point_style' + assert len(m.layers) == 1 # also assign a variable reference to that layer # below we will test that this variable references @@ -44,32 +45,29 @@ def test_adding_datasource_to_layer(): lyr = m.layers[0] # ensure that there was no datasource for the layer... - eq_(m.layers[0].datasource,None) - eq_(lyr.datasource,None) + assert m.layers[0].datasource == None + assert lyr.datasource == None # also note that since the srs was black it defaulted to wgs84 - eq_(m.layers[0].srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs') - eq_(lyr.srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs') + assert m.layers[0].srs == 'epsg:4326' + assert lyr.srs == 'epsg:4326' # now add a datasource one... 
ds = mapnik.Shapefile(file='../data/shp/world_merc.shp') m.layers[0].datasource = ds # now ensure it is attached - eq_(m.layers[0].datasource.describe()['name'],"shape") - eq_(lyr.datasource.describe()['name'],"shape") + assert m.layers[0].datasource.describe()['name'] == "shape" + assert lyr.datasource.describe()['name'] == "shape" - # and since we have now added a shapefile in spherical mercator, adjust the projection + # and since we have now added a shapefile in spherical mercator, adjust + # the projection lyr.srs = '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs' # test that assignment - eq_(m.layers[0].srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs') - eq_(lyr.srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs') - except RuntimeError, e: + assert m.layers[0].srs == '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs' + assert lyr.srs == '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs' + except RuntimeError as e: # only test datasources that we have installed if not 'Could not create datasource' in str(e): raise RuntimeError(e) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) diff --git a/test/python_tests/layer_test.py b/test/python_tests/layer_test.py index 00ea434d0..e8652bbc0 100644 --- a/test/python_tests/layer_test.py +++ b/test/python_tests/layer_test.py @@ -1,28 +1,21 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_ -from utilities import run_all import mapnik # Map initialization + def test_layer_init(): l = mapnik.Layer('test') - eq_(l.name,'test') - eq_(l.srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs') - eq_(l.envelope(),mapnik.Box2d()) - eq_(l.clear_label_cache,False) - eq_(l.cache_features,False) - eq_(l.visible(1),True) - eq_(l.active,True) - eq_(l.datasource,None) - eq_(l.queryable,False) - eq_(l.minimum_scale_denominator,0.0) - eq_(l.maximum_scale_denominator > 1e+6,True) - eq_(l.group_by,"") - eq_(l.maximum_extent,None) - eq_(l.buffer_size,None) - eq_(len(l.styles),0) - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert l.name == 'test' + assert l.srs == 'epsg:4326' + assert l.envelope() == mapnik.Box2d() + assert not l.clear_label_cache + assert not l.cache_features + assert l.visible(1) + assert l.active + assert l.datasource == None + assert not l.queryable + assert l.minimum_scale_denominator == 0.0 + assert l.maximum_scale_denominator > 1e+6 + assert l.group_by == "" + assert l.maximum_extent == None + assert l.buffer_size == None + assert len(l.styles) == 0 diff --git a/test/python_tests/load_map_test.py b/test/python_tests/load_map_test.py index 5eb211ed5..d9e0c3345 100644 --- a/test/python_tests/load_map_test.py +++ b/test/python_tests/load_map_test.py @@ -1,82 +1,94 @@ -#!/usr/bin/env python +import glob,os +import mapnik +import pytest -from nose.tools import eq_ -from utilities import execution_path, run_all - -import os, glob, mapnik +from .utilities import execution_path default_logging_severity = mapnik.logger.get_severity() -def setup(): - # make the tests silent to suppress unsupported params from harfbuzz tests - # TODO: remove this after harfbuzz branch merges - mapnik.logger.set_severity(mapnik.severity_type.None) +@pytest.fixture(scope="module") +def setup_and_teardown(): # All of the paths used are 
relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) - -def teardown(): + # make the tests silent to suppress unsupported params from harfbuzz tests + # TODO: remove this after harfbuzz branch merges + mapnik.logger.set_severity(getattr(mapnik.severity_type, "None")) + yield mapnik.logger.set_severity(default_logging_severity) -def test_broken_files(): +def test_broken_files(setup_and_teardown): default_logging_severity = mapnik.logger.get_severity() - mapnik.logger.set_severity(mapnik.severity_type.None) + mapnik.logger.set_severity(getattr(mapnik.severity_type, "None")) broken_files = glob.glob("../data/broken_maps/*.xml") - # Add a filename that doesn't exist + # Add a filename that doesn't exist broken_files.append("../data/broken/does_not_exist.xml") - failures = []; + failures = [] for filename in broken_files: try: m = mapnik.Map(512, 512) strict = True mapnik.load_map(m, filename, strict) - failures.append('Loading broken map (%s) did not raise RuntimeError!' % filename) + failures.append( + 'Loading broken map (%s) did not raise RuntimeError!' % + filename) except RuntimeError: pass - eq_(len(failures),0,'\n'+'\n'.join(failures)) + assert len(failures) == 0, '\n' + '\n'.join(failures) mapnik.logger.set_severity(default_logging_severity) + def test_can_parse_xml_with_deprecated_properties(): default_logging_severity = mapnik.logger.get_severity() - mapnik.logger.set_severity(mapnik.severity_type.None) + mapnik.logger.set_severity(getattr(mapnik.severity_type, "None")) files_with_deprecated_props = glob.glob("../data/deprecated_maps/*.xml") - failures = []; + failures = [] for filename in files_with_deprecated_props: try: m = mapnik.Map(512, 512) strict = True mapnik.load_map(m, filename, strict) + m = mapnik.Map(512, 512) base_path = os.path.dirname(filename) - mapnik.load_map_from_string(m,open(filename,'rb').read(),strict,base_path) - except RuntimeError, e: + mapnik.load_map_from_string( + m, + open( + filename, + 'rb').read(), + strict, + base_path) + except RuntimeError as e: # only test datasources that we have installed if not 'Could not create datasource' in str(e) \ and not 'could not connect' in str(e): - failures.append('Failed to load valid map %s (%s)' % (filename,e)) - eq_(len(failures),0,'\n'+'\n'.join(failures)) + failures.append( + 'Failed to load valid map %s (%s)' % + (filename, e)) + assert len(failures) == 0, '\n' + '\n'.join(failures) mapnik.logger.set_severity(default_logging_severity) + def test_good_files(): good_files = glob.glob("../data/good_maps/*.xml") good_files.extend(glob.glob("../visual_tests/styles/*.xml")) - failures = []; + failures = [] for filename in good_files: try: m = mapnik.Map(512, 512) strict = True mapnik.load_map(m, filename, strict) + m = mapnik.Map(512, 512) base_path = os.path.dirname(filename) - mapnik.load_map_from_string(m,open(filename,'rb').read(),strict,base_path) - except RuntimeError, e: + with open(filename, 'rb') as f: + mapnik.load_map_from_string(m, f.read(), strict, base_path) + except RuntimeError as e: # only test datasources that we have installed if not 'Could not create datasource' in str(e) \ and not 'could not connect' in str(e): - failures.append('Failed to load valid map %s (%s)' % (filename,e)) - eq_(len(failures),0,'\n'+'\n'.join(failures)) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + failures.append( + 'Failed to load valid map %s (%s)' % + (filename, e)) + assert len(failures) == 0, '\n' + 
'\n'.join(failures) diff --git a/test/python_tests/map_query_test.py b/test/python_tests/map_query_test.py index 4035f7ae9..542c43fd4 100644 --- a/test/python_tests/map_query_test.py +++ b/test/python_tests/map_query_test.py @@ -1,104 +1,106 @@ -#!/usr/bin/env python - -from nose.tools import eq_,raises,assert_almost_equal -from utilities import execution_path, run_all -import os, mapnik +import os +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield # map has no layers -@raises(IndexError) -def test_map_query_throw1(): - m = mapnik.Map(256,256) - m.zoom_to_box(mapnik.Box2d(-1,-1,0,0)) - m.query_point(0,0,0) +def test_map_query_throw1(setup): + with pytest.raises(IndexError): + m = mapnik.Map(256, 256) + m.zoom_to_box(mapnik.Box2d(-1, -1, 0, 0)) + m.query_point(0, 0, 0) # only positive indexes -@raises(IndexError) def test_map_query_throw2(): - m = mapnik.Map(256,256) - m.query_point(-1,0,0) + with pytest.raises(IndexError): + m = mapnik.Map(256, 256) + m.query_point(-1, 0, 0) # map has never been zoomed (nodata) -@raises(RuntimeError) def test_map_query_throw3(): - m = mapnik.Map(256,256) - m.query_point(0,0,0) + with pytest.raises(RuntimeError): + m = mapnik.Map(256, 256) + m.query_point(0, 0, 0) if 'shape' in mapnik.DatasourceCache.plugin_names(): # map has never been zoomed (even with data) - @raises(RuntimeError) def test_map_query_throw4(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml') - m.query_point(0,0,0) + with pytest.raises(RuntimeError): + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml') + m.query_point(0, 0, 0) # invalid coords in general (do not intersect) - @raises(RuntimeError) def test_map_query_throw5(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml') - m.zoom_all() - m.query_point(0,9999999999999999,9999999999999999) + with pytest.raises(RuntimeError): + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml') + m.zoom_all() + m.query_point(0, 9999999999999999, 9999999999999999) def test_map_query_works1(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml') - merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34) + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml') + merc_bounds = mapnik.Box2d(-20037508.34, - + 20037508.34, 20037508.34, 20037508.34) m.maximum_extent = merc_bounds m.zoom_all() - fs = m.query_point(0,-11012435.5376, 4599674.6134) # somewhere in kansas - feat = fs.next() - eq_(feat.attributes['NAME_FORMA'],u'United States of America') + # somewhere in kansas + fs = m.query_point(0, -11012435.5376, 4599674.6134) + feat = next(fs) + assert feat.attributes['NAME_FORMA'] == u'United States of America' def test_map_query_works2(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/merc2wgs84_reprojection.xml') - wgs84_bounds = mapnik.Box2d(-179.999999975,-85.0511287776,179.999999975,85.0511287776) + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/merc2wgs84_reprojection.xml') + wgs84_bounds = mapnik.Box2d(-179.999999975, - + 85.0511287776, 179.999999975, 85.0511287776) m.maximum_extent = wgs84_bounds # caution - will go square due to evil aspect_fix_mode 
backhandedness m.zoom_all() - #mapnik.render_to_file(m,'works2.png') + # mapnik.render_to_file(m,'works2.png') # validate that aspect_fix_mode modified the bbox reasonably e = m.envelope() - assert_almost_equal(e.minx, -179.999999975, places=7) - assert_almost_equal(e.miny, -167.951396161, places=7) - assert_almost_equal(e.maxx, 179.999999975, places=7) - assert_almost_equal(e.maxy, 192.048603789, places=7) - fs = m.query_point(0,-98.9264, 38.1432) # somewhere in kansas - feat = fs.next() - eq_(feat.attributes['NAME'],u'United States') + assert e.minx == pytest.approx(-179.999999975, abs=1e-7) + assert e.miny == pytest.approx(-167.951396161, abs=1e-7) + assert e.maxx == pytest.approx(179.999999975, abs=1e-7) + assert e.maxy == pytest.approx(192.048603789, abs=1e-7) + fs = m.query_point(0, -98.9264, 38.1432) # somewhere in kansas + feat = next(fs) + assert feat.attributes['NAME'] == u'United States' def test_map_query_in_pixels_works1(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml') - merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34) + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml') + merc_bounds = mapnik.Box2d(-20037508.34, - + 20037508.34, 20037508.34, 20037508.34) m.maximum_extent = merc_bounds m.zoom_all() - fs = m.query_map_point(0,55,100) # somewhere in middle of us - feat = fs.next() - eq_(feat.attributes['NAME_FORMA'],u'United States of America') + fs = m.query_map_point(0, 55, 100) # somewhere in middle of us + feat = next(fs) + assert feat.attributes['NAME_FORMA'] == u'United States of America' def test_map_query_in_pixels_works2(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/merc2wgs84_reprojection.xml') - wgs84_bounds = mapnik.Box2d(-179.999999975,-85.0511287776,179.999999975,85.0511287776) + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/merc2wgs84_reprojection.xml') + wgs84_bounds = mapnik.Box2d(-179.999999975, - + 85.0511287776, 179.999999975, 85.0511287776) m.maximum_extent = wgs84_bounds # caution - will go square due to evil aspect_fix_mode backhandedness m.zoom_all() # validate that aspect_fix_mode modified the bbox reasonably e = m.envelope() - assert_almost_equal(e.minx, -179.999999975, places=7) - assert_almost_equal(e.miny, -167.951396161, places=7) - assert_almost_equal(e.maxx, 179.999999975, places=7) - assert_almost_equal(e.maxy, 192.048603789, places=7) - fs = m.query_map_point(0,55,100) # somewhere in Canada - feat = fs.next() - eq_(feat.attributes['NAME'],u'Canada') - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert e.minx == pytest.approx(-179.999999975, abs=1e-7) + assert e.miny == pytest.approx(-167.951396161, abs=1e-7) + assert e.maxx == pytest.approx(179.999999975, abs=1e-7) + assert e.maxy == pytest.approx(192.048603789, abs=1e-7) + fs = m.query_map_point(0, 55, 100) # somewhere in Canada + feat = next(fs) + assert feat.attributes['NAME'] == u'Canada' diff --git a/test/python_tests/mapnik_logger_test.py b/test/python_tests/mapnik_logger_test.py index c27ff46a0..7d7566890 100644 --- a/test/python_tests/mapnik_logger_test.py +++ b/test/python_tests/mapnik_logger_test.py @@ -1,18 +1,12 @@ -#!/usr/bin/env python -from nose.tools import eq_ -from utilities import run_all import mapnik def test_logger_init(): - eq_(mapnik.severity_type.Debug,0) - eq_(mapnik.severity_type.Warn,1) - eq_(mapnik.severity_type.Error,2) - 
eq_(mapnik.severity_type.None,3) + assert mapnik.severity_type.Debug == 0 + assert mapnik.severity_type.Warn == 1 + assert mapnik.severity_type.Error == 2 + assert getattr(mapnik.severity_type, "None") == 3 default = mapnik.logger.get_severity() mapnik.logger.set_severity(mapnik.severity_type.Debug) - eq_(mapnik.logger.get_severity(),mapnik.severity_type.Debug) + assert mapnik.logger.get_severity() == mapnik.severity_type.Debug mapnik.logger.set_severity(default) - eq_(mapnik.logger.get_severity(),default) - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert mapnik.logger.get_severity() == default diff --git a/test/python_tests/mapnik_test_data_test.py b/test/python_tests/mapnik_test_data_test.py index b4226e125..a75306e74 100644 --- a/test/python_tests/mapnik_test_data_test.py +++ b/test/python_tests/mapnik_test_data_test.py @@ -1,31 +1,18 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from utilities import execution_path, run_all -import os, mapnik +import os from glob import glob - -default_logging_severity = mapnik.logger.get_severity() - -def setup(): - mapnik.logger.set_severity(mapnik.severity_type.None) - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) - -def teardown(): - mapnik.logger.set_severity(default_logging_severity) +import mapnik plugin_mapping = { - '.csv' : ['csv'], - '.json': ['geojson','ogr'], - '.tif' : ['gdal'], + '.csv': ['csv'], + '.json': ['geojson', 'ogr'], + '.tif': ['gdal'], #'.tif' : ['gdal','raster'], - '.kml' : ['ogr'], - '.gpx' : ['ogr'], - '.vrt' : ['gdal'] + '.kml': ['ogr'], + '.gpx': ['ogr'], + '.vrt': ['gdal'] } + def test_opening_data(): # https://github.com/mapbox/mapnik-test-data # cd tests/data @@ -35,26 +22,22 @@ def test_opening_data(): for filepath in files: ext = os.path.splitext(filepath)[1] if plugin_mapping.get(ext): - #print 'testing opening %s' % filepath + # print 'testing opening %s' % filepath if 'topo' in filepath: - kwargs = {'type': 'ogr','file': filepath} + kwargs = {'type': 'ogr', 'file': filepath} kwargs['layer_by_index'] = 0 try: mapnik.Datasource(**kwargs) - except Exception, e: - print 'could not open, %s: %s' % (kwargs,e) + except Exception as e: + print('could not open, %s: %s' % (kwargs, e)) else: - for plugin in plugin_mapping[ext]: - kwargs = {'type': plugin,'file': filepath} - if plugin is 'ogr': - kwargs['layer_by_index'] = 0 - try: - mapnik.Datasource(**kwargs) - except Exception, e: - print 'could not open, %s: %s' % (kwargs,e) - #else: + for plugin in plugin_mapping[ext]: + kwargs = {'type': plugin, 'file': filepath} + if plugin == 'ogr': + kwargs['layer_by_index'] = 0 + try: + mapnik.Datasource(**kwargs) + except Exception as e: + print('could not open, %s: %s' % (kwargs, e)) + # else: # print 'skipping opening %s' % filepath - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) diff --git a/test/python_tests/markers_complex_rendering_test.py b/test/python_tests/markers_complex_rendering_test.py index efce684fa..8c07f6b67 100644 --- a/test/python_tests/markers_complex_rendering_test.py +++ b/test/python_tests/markers_complex_rendering_test.py @@ -1,43 +1,40 @@ -#coding=utf8 -import os +import pytest import mapnik -from utilities import execution_path, run_all -from nose.tools import eq_ +import os +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we 
run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if 'csv' in mapnik.DatasourceCache.plugin_names(): - def test_marker_ellipse_render1(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/marker_ellipse_transform.xml') + def test_marker_ellipse_render1(setup): + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/marker_ellipse_transform.xml') m.zoom_all() - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) actual = '/tmp/mapnik-marker-ellipse-render1.png' - expected = 'images/support/mapnik-marker-ellipse-render1.png' - im.save(actual,'png32') + expected = './images/support/mapnik-marker-ellipse-render1.png' + im.save(actual, 'png32') if os.environ.get('UPDATE'): - im.save(expected,'png32') + im.save(expected, 'png32') expected_im = mapnik.Image.open(expected) - eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected)) + assert im.to_string('png32') == expected_im.to_string('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual, expected) def test_marker_ellipse_render2(): - m = mapnik.Map(256,256) - mapnik.load_map(m,'../data/good_maps/marker_ellipse_transform2.xml') + m = mapnik.Map(256, 256) + mapnik.load_map(m, '../data/good_maps/marker_ellipse_transform2.xml') m.zoom_all() - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) actual = '/tmp/mapnik-marker-ellipse-render2.png' - expected = 'images/support/mapnik-marker-ellipse-render2.png' - im.save(actual,'png32') + expected = './images/support/mapnik-marker-ellipse-render2.png' + im.save(actual, 'png32') if os.environ.get('UPDATE'): - im.save(expected,'png32') + im.save(expected, 'png32') expected_im = mapnik.Image.open(expected) - eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected)) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert im.to_string('png32') == expected_im.to_string('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual, expected) diff --git a/test/python_tests/memory_datasource_test.py b/test/python_tests/memory_datasource_test.py index bd82beaef..fbe1d8987 100644 --- a/test/python_tests/memory_datasource_test.py +++ b/test/python_tests/memory_datasource_test.py @@ -1,34 +1,28 @@ -#encoding: utf8 import mapnik -from utilities import run_all -from nose.tools import eq_ def test_add_feature(): md = mapnik.MemoryDatasource() - eq_(md.num_features(), 0) + assert md.num_features() == 0 context = mapnik.Context() context.push('foo') - feature = mapnik.Feature(context,1) + feature = mapnik.Feature(context, 1) feature['foo'] = 'bar' feature.geometry = mapnik.Geometry.from_wkt('POINT(2 3)') md.add_feature(feature) - eq_(md.num_features(), 1) + assert md.num_features() == 1 - featureset = md.features_at_point(mapnik.Coord(2,3)) + featureset = md.features_at_point(mapnik.Coord(2, 3)) retrieved = [] for feat in featureset: retrieved.append(feat) - eq_(len(retrieved), 1) + assert len(retrieved) == 1 f = retrieved[0] - eq_(f['foo'], 'bar') + assert f['foo'] == 'bar' - featureset = md.features_at_point(mapnik.Coord(20,30)) + featureset = md.features_at_point(mapnik.Coord(20, 30)) retrieved = [] for feat in featureset: retrieved.append(feat) - 
eq_(len(retrieved), 0) - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert len(retrieved) == 0 diff --git a/test/python_tests/multi_tile_raster_test.py b/test/python_tests/multi_tile_raster_test.py index 7dda8760f..b4b825b01 100644 --- a/test/python_tests/multi_tile_raster_test.py +++ b/test/python_tests/multi_tile_raster_test.py @@ -1,35 +1,37 @@ -#!/usr/bin/env python - -from nose.tools import eq_ -from utilities import execution_path, run_all -import os, mapnik +import os +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield + -def test_multi_tile_policy(): - srs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' +def test_multi_tile_policy(setup): + srs = 'epsg:4326' lyr = mapnik.Layer('raster') if 'raster' in mapnik.DatasourceCache.plugin_names(): lyr.datasource = mapnik.Raster( - file = '../data/raster_tiles/${x}/${y}.tif', - lox = -180, - loy = -90, - hix = 180, - hiy = 90, - multi = 1, - tile_size = 256, - x_width = 2, - y_width = 2 - ) + file='../data/raster_tiles/${x}/${y}.tif', + lox=-180, + loy=-90, + hix=180, + hiy=90, + multi=1, + tile_size=256, + x_width=2, + y_width=2 + ) lyr.srs = srs _map = mapnik.Map(256, 256, srs) style = mapnik.Style() rule = mapnik.Rule() sym = mapnik.RasterSymbolizer() - rule.symbols.append(sym) + rule.symbolizers.append(sym) style.rules.append(rule) _map.append_style('foo', style) lyr.styles.append('foo') @@ -38,31 +40,25 @@ def test_multi_tile_policy(): im = mapnik.Image(_map.width, _map.height) mapnik.render(_map, im) - - # test green chunk - eq_(im.view(0,64,1,1).tostring(), '\x00\xff\x00\xff') - eq_(im.view(127,64,1,1).tostring(), '\x00\xff\x00\xff') - eq_(im.view(0,127,1,1).tostring(), '\x00\xff\x00\xff') - eq_(im.view(127,127,1,1).tostring(), '\x00\xff\x00\xff') + assert im.view(0, 64, 1, 1).to_string() == b'\x00\xff\x00\xff' + assert im.view(127, 64, 1, 1).to_string() == b'\x00\xff\x00\xff' + assert im.view(0, 127, 1, 1).to_string() == b'\x00\xff\x00\xff' + assert im.view(127, 127, 1, 1).to_string() == b'\x00\xff\x00\xff' # test blue chunk - eq_(im.view(128,64,1,1).tostring(), '\x00\x00\xff\xff') - eq_(im.view(255,64,1,1).tostring(), '\x00\x00\xff\xff') - eq_(im.view(128,127,1,1).tostring(), '\x00\x00\xff\xff') - eq_(im.view(255,127,1,1).tostring(), '\x00\x00\xff\xff') + assert im.view(128, 64, 1, 1).to_string() == b'\x00\x00\xff\xff' + assert im.view(255, 64, 1, 1).to_string() == b'\x00\x00\xff\xff' + assert im.view(128, 127, 1, 1).to_string() == b'\x00\x00\xff\xff' + assert im.view(255, 127, 1, 1).to_string() == b'\x00\x00\xff\xff' # test red chunk - eq_(im.view(0,128,1,1).tostring(), '\xff\x00\x00\xff') - eq_(im.view(127,128,1,1).tostring(), '\xff\x00\x00\xff') - eq_(im.view(0,191,1,1).tostring(), '\xff\x00\x00\xff') - eq_(im.view(127,191,1,1).tostring(), '\xff\x00\x00\xff') + assert im.view(0, 128, 1, 1).to_string() == b'\xff\x00\x00\xff' + assert im.view(127, 128, 1, 1).to_string() == b'\xff\x00\x00\xff' + assert im.view(0, 191, 1, 1).to_string() == b'\xff\x00\x00\xff' + assert im.view(127, 191, 1, 1).to_string() == b'\xff\x00\x00\xff' # test magenta chunk - eq_(im.view(128,128,1,1).tostring(), '\xff\x00\xff\xff') - eq_(im.view(255,128,1,1).tostring(), '\xff\x00\xff\xff') - eq_(im.view(128,191,1,1).tostring(), '\xff\x00\xff\xff') - eq_(im.view(255,191,1,1).tostring(), '\xff\x00\xff\xff') 
- -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert im.view(128, 128, 1, 1).to_string() == b'\xff\x00\xff\xff' + assert im.view(255, 128, 1, 1).to_string() == b'\xff\x00\xff\xff' + assert im.view(128, 191, 1, 1).to_string() == b'\xff\x00\xff\xff' + assert im.view(255, 191, 1, 1).to_string() == b'\xff\x00\xff\xff' diff --git a/test/python_tests/object_test.py b/test/python_tests/object_test.py index 0f23e7118..e965c37f3 100644 --- a/test/python_tests/object_test.py +++ b/test/python_tests/object_test.py @@ -1,569 +1,275 @@ -# #!/usr/bin/env python -# # -*- coding: utf-8 -*- - -# import os -# from nose.tools import * -# from utilities import execution_path, run_all -# import tempfile - -# import mapnik - -# def setup(): -# # All of the paths used are relative, if we run the tests -# # from another directory we need to chdir() -# os.chdir(execution_path('.')) - -# def test_debug_symbolizer(): -# s = mapnik.DebugSymbolizer() -# eq_(s.mode,mapnik.debug_symbolizer_mode.collision) - -# def test_raster_symbolizer(): -# s = mapnik.RasterSymbolizer() -# eq_(s.comp_op,mapnik.CompositeOp.src_over) # note: mode is deprecated -# eq_(s.scaling,mapnik.scaling_method.NEAR) -# eq_(s.opacity,1.0) -# eq_(s.colorizer,None) -# eq_(s.filter_factor,-1) -# eq_(s.mesh_size,16) -# eq_(s.premultiplied,None) -# s.premultiplied = True -# eq_(s.premultiplied,True) - -# def test_line_pattern(): -# s = mapnik.LinePatternSymbolizer(mapnik.PathExpression('../data/images/dummy.png')) -# eq_(s.filename, '../data/images/dummy.png') -# eq_(s.smooth,0.0) -# eq_(s.transform,'') -# eq_(s.offset,0.0) -# eq_(s.comp_op,mapnik.CompositeOp.src_over) -# eq_(s.clip,True) - -# def test_line_symbolizer(): -# s = mapnik.LineSymbolizer() -# eq_(s.rasterizer, mapnik.line_rasterizer.FULL) -# eq_(s.smooth,0.0) -# eq_(s.comp_op,mapnik.CompositeOp.src_over) -# eq_(s.clip,True) -# eq_(s.stroke.width, 1) -# eq_(s.stroke.opacity, 1) -# eq_(s.stroke.color, mapnik.Color('black')) -# eq_(s.stroke.line_cap, mapnik.line_cap.BUTT_CAP) -# eq_(s.stroke.line_join, mapnik.line_join.MITER_JOIN) - -# l = mapnik.LineSymbolizer(mapnik.Color('blue'), 5.0) - -# eq_(l.stroke.width, 5) -# eq_(l.stroke.opacity, 1) -# eq_(l.stroke.color, mapnik.Color('blue')) -# eq_(l.stroke.line_cap, mapnik.line_cap.BUTT_CAP) -# eq_(l.stroke.line_join, mapnik.line_join.MITER_JOIN) - -# s = mapnik.Stroke(mapnik.Color('blue'), 5.0) -# l = mapnik.LineSymbolizer(s) - -# eq_(l.stroke.width, 5) -# eq_(l.stroke.opacity, 1) -# eq_(l.stroke.color, mapnik.Color('blue')) -# eq_(l.stroke.line_cap, mapnik.line_cap.BUTT_CAP) -# eq_(l.stroke.line_join, mapnik.line_join.MITER_JOIN) - -# def test_line_symbolizer_stroke_reference(): -# l = mapnik.LineSymbolizer(mapnik.Color('green'),0.1) -# l.stroke.add_dash(.1,.1) -# l.stroke.add_dash(.1,.1) -# eq_(l.stroke.get_dashes(), [(.1,.1),(.1,.1)]) -# eq_(l.stroke.color,mapnik.Color('green')) -# eq_(l.stroke.opacity,1.0) -# assert_almost_equal(l.stroke.width,0.1) - -# # https://github.com/mapnik/mapnik/issues/1427 -# def test_stroke_dash_api(): -# stroke = mapnik.Stroke() -# dashes = [(1.0,1.0)] -# stroke.dasharray = dashes -# eq_(stroke.dasharray, dashes) -# stroke.add_dash(.1,.1) -# dashes.append((.1,.1)) -# eq_(stroke.dasharray, dashes) - - -# def test_text_symbolizer(): -# s = mapnik.TextSymbolizer() -# eq_(s.comp_op,mapnik.CompositeOp.src_over) -# eq_(s.clip,True) -# eq_(s.halo_rasterizer,mapnik.halo_rasterizer.FULL) - -# # https://github.com/mapnik/mapnik/issues/1420 -# eq_(s.text_transform, 
mapnik.text_transform.NONE) - -# # old args required method -# ts = mapnik.TextSymbolizer(mapnik.Expression('[Field_Name]'), 'Font Name', 8, mapnik.Color('black')) -# # eq_(str(ts.name), str(mapnik2.Expression('[Field_Name]'))) name field is no longer supported -# eq_(ts.format.face_name, 'Font Name') -# eq_(ts.format.text_size, 8) -# eq_(ts.format.fill, mapnik.Color('black')) -# eq_(ts.properties.label_placement, mapnik.label_placement.POINT_PLACEMENT) -# eq_(ts.properties.horizontal_alignment, mapnik.horizontal_alignment.AUTO) - -# def test_shield_symbolizer_init(): -# s = mapnik.ShieldSymbolizer(mapnik.Expression('[Field Name]'), 'DejaVu Sans Bold', 6, mapnik.Color('#000000'), mapnik.PathExpression('../data/images/dummy.png')) -# eq_(s.comp_op,mapnik.CompositeOp.src_over) -# eq_(s.clip,True) -# eq_(s.displacement, (0.0,0.0)) -# eq_(s.allow_overlap, False) -# eq_(s.avoid_edges, False) -# eq_(s.character_spacing,0) -# #eq_(str(s.name), str(mapnik2.Expression('[Field Name]'))) name field is no longer supported -# eq_(s.face_name, 'DejaVu Sans Bold') -# eq_(s.allow_overlap, False) -# eq_(s.fill, mapnik.Color('#000000')) -# eq_(s.halo_fill, mapnik.Color('rgb(255,255,255)')) -# eq_(s.halo_radius, 0) -# eq_(s.label_placement, mapnik.label_placement.POINT_PLACEMENT) -# eq_(s.minimum_distance, 0.0) -# eq_(s.text_ratio, 0) -# eq_(s.text_size, 6) -# eq_(s.wrap_width, 0) -# eq_(s.vertical_alignment, mapnik.vertical_alignment.AUTO) -# eq_(s.label_spacing, 0) -# eq_(s.label_position_tolerance, 0) -# # 22.5 * M_PI/180.0 initialized by default -# assert_almost_equal(s.max_char_angle_delta, 0.39269908169872414) - -# eq_(s.text_transform, mapnik.text_transform.NONE) -# eq_(s.line_spacing, 0) -# eq_(s.character_spacing, 0) - -# # r1341 -# eq_(s.wrap_before, False) -# eq_(s.horizontal_alignment, mapnik.horizontal_alignment.AUTO) -# eq_(s.justify_alignment, mapnik.justify_alignment.AUTO) -# eq_(s.opacity, 1.0) - -# # r2300 -# eq_(s.minimum_padding, 0.0) - -# # was mixed with s.opacity -# eq_(s.text_opacity, 1.0) - -# eq_(s.shield_displacement, (0.0,0.0)) -# # TODO - the pattern in bindings seems to be to get/set -# # strings for PathExpressions... should we pass objects? -# eq_(s.filename, '../data/images/dummy.png') - -# # 11c34b1: default transform list is empty, not identity matrix -# eq_(s.transform, '') - -# eq_(s.fontset, None) - -# # ShieldSymbolizer missing image file -# # images paths are now PathExpressions are evaluated at runtime -# # so it does not make sense to throw... 
-# #@raises(RuntimeError) -# #def test_shieldsymbolizer_missing_image(): -# # s = mapnik.ShieldSymbolizer(mapnik.Expression('[Field Name]'), 'DejaVu Sans Bold', 6, mapnik.Color('#000000'), mapnik.PathExpression('../#data/images/broken.png')) - -# def test_shield_symbolizer_modify(): -# s = mapnik.ShieldSymbolizer(mapnik.Expression('[Field Name]'), 'DejaVu Sans Bold', 6, mapnik.Color('#000000'), mapnik.PathExpression('../data/images/dummy.png')) -# # transform expression -# def check_transform(expr, expect_str=None): -# s.transform = expr -# eq_(s.transform, expr if expect_str is None else expect_str) -# check_transform("matrix(1 2 3 4 5 6)", "matrix(1, 2, 3, 4, 5, 6)") -# check_transform("matrix(1, 2, 3, 4, 5, 6 +7)", "matrix(1, 2, 3, 4, 5, (6+7))") -# check_transform("rotate([a])") -# check_transform("rotate([a] -2)", "rotate(([a]-2))") -# check_transform("rotate([a] -2 -3)", "rotate([a], -2, -3)") -# check_transform("rotate([a] -2 -3 -4)", "rotate(((([a]-2)-3)-4))") -# check_transform("rotate([a] -2, 3, 4)", "rotate(([a]-2), 3, 4)") -# check_transform("translate([tx]) rotate([a])") -# check_transform("scale([sx], [sy]/2)") -# # TODO check expected failures - -# def test_point_symbolizer(): -# p = mapnik.PointSymbolizer() -# eq_(p.filename,'') -# eq_(p.transform,'') -# eq_(p.opacity,1.0) -# eq_(p.allow_overlap,False) -# eq_(p.ignore_placement,False) -# eq_(p.comp_op,mapnik.CompositeOp.src_over) -# eq_(p.placement, mapnik.point_placement.CENTROID) - -# p = mapnik.PointSymbolizer(mapnik.PathExpression("../data/images/dummy.png")) -# p.allow_overlap = True -# p.opacity = 0.5 -# p.ignore_placement = True -# p.placement = mapnik.point_placement.INTERIOR -# eq_(p.allow_overlap, True) -# eq_(p.opacity, 0.5) -# eq_(p.filename,'../data/images/dummy.png') -# eq_(p.ignore_placement,True) -# eq_(p.placement, mapnik.point_placement.INTERIOR) - -# def test_markers_symbolizer(): -# p = mapnik.MarkersSymbolizer() -# eq_(p.allow_overlap, False) -# eq_(p.opacity,1.0) -# eq_(p.fill_opacity,None) -# eq_(p.filename,'shape://ellipse') -# eq_(p.placement,mapnik.marker_placement.POINT_PLACEMENT) -# eq_(p.multi_policy,mapnik.marker_multi_policy.EACH) -# eq_(p.fill,None) -# eq_(p.ignore_placement,False) -# eq_(p.spacing,100) -# eq_(p.max_error,0.2) -# eq_(p.width,None) -# eq_(p.height,None) -# eq_(p.transform,'') -# eq_(p.clip,True) -# eq_(p.comp_op,mapnik.CompositeOp.src_over) - - -# p.width = mapnik.Expression('12') -# p.height = mapnik.Expression('12') -# eq_(str(p.width),'12') -# eq_(str(p.height),'12') - -# p.width = mapnik.Expression('[field] + 2') -# p.height = mapnik.Expression('[field] + 2') -# eq_(str(p.width),'([field]+2)') -# eq_(str(p.height),'([field]+2)') - -# stroke = mapnik.Stroke() -# stroke.color = mapnik.Color('black') -# stroke.width = 1.0 - -# p.stroke = stroke -# p.fill = mapnik.Color('white') -# p.allow_overlap = True -# p.opacity = 0.5 -# p.fill_opacity = 0.5 -# p.placement = mapnik.marker_placement.LINE_PLACEMENT -# p.multi_policy = mapnik.marker_multi_policy.WHOLE - -# eq_(p.allow_overlap, True) -# eq_(p.opacity, 0.5) -# eq_(p.fill_opacity, 0.5) -# eq_(p.multi_policy,mapnik.marker_multi_policy.WHOLE) -# eq_(p.placement,mapnik.marker_placement.LINE_PLACEMENT) - -# #https://github.com/mapnik/mapnik/issues/1285 -# #https://github.com/mapnik/mapnik/issues/1427 -# p.marker_type = 'arrow' -# eq_(p.marker_type,'shape://arrow') -# eq_(p.filename,'shape://arrow') - - -# # PointSymbolizer missing image file -# # images paths are now PathExpressions are evaluated at runtime -# # so it does not make 
sense to throw... -# #@raises(RuntimeError) -# #def test_pointsymbolizer_missing_image(): -# # p = mapnik.PointSymbolizer(mapnik.PathExpression("../data/images/broken.png")) - -# def test_polygon_symbolizer(): -# p = mapnik.PolygonSymbolizer() -# eq_(p.smooth,0.0) -# eq_(p.comp_op,mapnik.CompositeOp.src_over) -# eq_(p.clip,True) -# eq_(p.fill, mapnik.Color('gray')) -# eq_(p.fill_opacity, 1) - -# p = mapnik.PolygonSymbolizer(mapnik.Color('blue')) - -# eq_(p.fill, mapnik.Color('blue')) -# eq_(p.fill_opacity, 1) - -# def test_building_symbolizer_init(): -# p = mapnik.BuildingSymbolizer() - -# eq_(p.fill, mapnik.Color('gray')) -# eq_(p.fill_opacity, 1) -# eq_(p.height,None) - -# def test_group_symbolizer_init(): -# s = mapnik.GroupSymbolizer() - -# p = mapnik.GroupSymbolizerProperties() - -# l = mapnik.PairLayout() -# l.item_margin = 5.0 -# p.set_layout(l) - -# r = mapnik.GroupRule(mapnik.Expression("[name%1]")) -# r.append(mapnik.PointSymbolizer()) -# p.add_rule(r) -# s.symbolizer_properties = p - -# eq_(s.comp_op,mapnik.CompositeOp.src_over) - -# def test_stroke_init(): -# s = mapnik.Stroke() - -# eq_(s.width, 1) -# eq_(s.opacity, 1) -# eq_(s.color, mapnik.Color('black')) -# eq_(s.line_cap, mapnik.line_cap.BUTT_CAP) -# eq_(s.line_join, mapnik.line_join.MITER_JOIN) -# eq_(s.gamma,1.0) - -# s = mapnik.Stroke(mapnik.Color('blue'), 5.0) -# s.gamma = .5 - -# eq_(s.width, 5) -# eq_(s.opacity, 1) -# eq_(s.color, mapnik.Color('blue')) -# eq_(s.gamma, .5) -# eq_(s.line_cap, mapnik.line_cap.BUTT_CAP) -# eq_(s.line_join, mapnik.line_join.MITER_JOIN) - -# def test_stroke_dash_arrays(): -# s = mapnik.Stroke() -# s.add_dash(1,2) -# s.add_dash(3,4) -# s.add_dash(5,6) - -# eq_(s.get_dashes(), [(1,2),(3,4),(5,6)]) - -# def test_map_init(): -# m = mapnik.Map(256, 256) - -# eq_(m.width, 256) -# eq_(m.height, 256) -# eq_(m.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs') -# eq_(m.base, '') -# eq_(m.maximum_extent, None) -# eq_(m.background_image, None) -# eq_(m.background_image_comp_op, mapnik.CompositeOp.src_over) -# eq_(m.background_image_opacity, 1.0) - -# m = mapnik.Map(256, 256, '+proj=latlong') -# eq_(m.srs, '+proj=latlong') - -# def test_map_style_access(): -# m = mapnik.Map(256, 256) -# sty = mapnik.Style() -# m.append_style("style",sty) -# styles = list(m.styles) -# eq_(len(styles),1) -# eq_(styles[0][0],'style') -# # returns a copy so let's just check it is the right instance -# eq_(isinstance(styles[0][1],mapnik.Style),True) - -# def test_map_maximum_extent_modification(): -# m = mapnik.Map(256, 256) -# eq_(m.maximum_extent, None) -# m.maximum_extent = mapnik.Box2d() -# eq_(m.maximum_extent, mapnik.Box2d()) -# m.maximum_extent = None -# eq_(m.maximum_extent, None) - -# # Map initialization from string -# def test_map_init_from_string(): -# map_string = ''' -# -# -# My Style -# -# shape -# ../../demo/data/boundaries -# -# -# ''' - -# m = mapnik.Map(600, 300) -# eq_(m.base, '') -# try: -# mapnik.load_map_from_string(m, map_string) -# eq_(m.base, './') -# mapnik.load_map_from_string(m, map_string, False, "") # this "" will have no effect -# eq_(m.base, './') - -# tmp_dir = tempfile.gettempdir() -# try: -# mapnik.load_map_from_string(m, map_string, False, tmp_dir) -# except RuntimeError: -# pass # runtime error expected because shapefile path should be wrong and datasource will throw -# eq_(m.base, tmp_dir) # tmp_dir will be set despite the exception because load_map mostly worked -# m.base = 'foo' -# mapnik.load_map_from_string(m, map_string, True, ".") -# eq_(m.base, '.') -# except 
RuntimeError, e: -# # only test datasources that we have installed -# if not 'Could not create datasource' in str(e): -# raise RuntimeError(e) +import os +import tempfile +import mapnik +import pytest + +from .utilities import execution_path + +@pytest.fixture(scope="module") +def setup(): + # All of the paths used are relative, if we run the tests + # from another directory we need to chdir() + os.chdir(execution_path('.')) + yield + +def test_debug_symbolizer(setup): + s = mapnik.DebugSymbolizer() + s.mode = mapnik.debug_symbolizer_mode.COLLISION + assert s.mode == mapnik.debug_symbolizer_mode.COLLISION + +def test_raster_symbolizer(): + s = mapnik.RasterSymbolizer() + s.comp_op = mapnik.CompositeOp.src_over + s.scaling = mapnik.scaling_method.NEAR + s.opacity = 1.0 + s.mesh_size = 16 + + assert s.comp_op == mapnik.CompositeOp.src_over # note: mode is deprecated + assert s.scaling == mapnik.scaling_method.NEAR + assert s.opacity == 1.0 + assert s.colorizer == None + assert s.mesh_size == 16 + assert s.premultiplied == None + s.premultiplied = True + assert s.premultiplied == True + +def test_line_pattern(): + s = mapnik.LinePatternSymbolizer() + s.file = mapnik.PathExpression('../data/images/dummy.png') + assert str(s.file) == '../data/images/dummy.png' + +def test_map_init(): + m = mapnik.Map(256, 256) + assert m.width == 256 + assert m.height == 256 + assert m.srs == 'epsg:4326' + assert m.base == '' + assert m.maximum_extent == None + assert m.background_image == None + assert m.background_image_comp_op == mapnik.CompositeOp.src_over + assert m.background_image_opacity == 1.0 + m = mapnik.Map(256, 256, '+proj=latlong') + assert m.srs == '+proj=latlong' + +def test_map_style_access(): + m = mapnik.Map(256, 256) + sty = mapnik.Style() + m.append_style("style",sty) + styles = list(m.styles.items()) + assert len(styles) == 1 + assert styles[0][0] == 'style' + # returns a copy so let's just check it is the right instance + assert isinstance(styles[0][1],mapnik.Style) + +def test_map_maximum_extent_modification(): + m = mapnik.Map(256, 256) + assert m.maximum_extent == None + m.maximum_extent = mapnik.Box2d() + assert m.maximum_extent == mapnik.Box2d() + m.maximum_extent = None + assert m.maximum_extent == None + +# Map initialization from string +def test_map_init_from_string(): + map_string = ''' + + + My Style + + shape + ../../demo/data/boundaries + + + ''' + + m = mapnik.Map(600, 300) + assert m.base == '' + try: + mapnik.load_map_from_string(m, map_string) + assert m.base == './' + mapnik.load_map_from_string(m, map_string, False, "") # this "" will have no effect + assert m.base == './' + + tmp_dir = tempfile.gettempdir() + try: + mapnik.load_map_from_string(m, map_string, False, tmp_dir) + except RuntimeError: + pass # runtime error expected because shapefile path should be wrong and datasource will throw + assert m.base == tmp_dir # tmp_dir will be set despite the exception because load_map mostly worked + m.remove_all() + m.base = 'foo' + mapnik.load_map_from_string(m, map_string, True, ".") + assert m.base == '.' 
+ except RuntimeError as e: + # only test datasources that we have installed + if not 'Could not create datasource' in str(e): + raise RuntimeError(e) # # Color initialization -# @raises(Exception) # Boost.Python.ArgumentError -# def test_color_init_errors(): -# c = mapnik.Color() +def test_color_init_errors(): + with pytest.raises(Exception): # Boost.Python.ArgumentError + c = mapnik.Color() -# @raises(RuntimeError) -# def test_color_init_errors(): -# c = mapnik.Color('foo') # mapnik config +def test_color_init_errors(): + with pytest.raises(RuntimeError): + c = mapnik.Color('foo') # mapnik config -# def test_color_init(): -# c = mapnik.Color('blue') +def test_color_init(): + c = mapnik.Color('blue') + assert c.a == 255 + assert c.r == 0 + assert c.g == 0 + assert c.b == 255 -# eq_(c.a, 255) -# eq_(c.r, 0) -# eq_(c.g, 0) -# eq_(c.b, 255) + assert c.to_hex_string() == '#0000ff' -# eq_(c.to_hex_string(), '#0000ff') + c = mapnik.Color('#f2eff9') -# c = mapnik.Color('#f2eff9') + assert c.a == 255 + assert c.r == 242 + assert c.g == 239 + assert c.b == 249 -# eq_(c.a, 255) -# eq_(c.r, 242) -# eq_(c.g, 239) -# eq_(c.b, 249) + assert c.to_hex_string() == '#f2eff9' -# eq_(c.to_hex_string(), '#f2eff9') + c = mapnik.Color('rgb(50%,50%,50%)') -# c = mapnik.Color('rgb(50%,50%,50%)') + assert c.a == 255 + assert c.r == 128 + assert c.g == 128 + assert c.b == 128 -# eq_(c.a, 255) -# eq_(c.r, 128) -# eq_(c.g, 128) -# eq_(c.b, 128) + assert c.to_hex_string() == '#808080' -# eq_(c.to_hex_string(), '#808080') + c = mapnik.Color(0, 64, 128) -# c = mapnik.Color(0, 64, 128) + assert c.a == 255 + assert c.r == 0 + assert c.g == 64 + assert c.b == 128 -# eq_(c.a, 255) -# eq_(c.r, 0) -# eq_(c.g, 64) -# eq_(c.b, 128) + assert c.to_hex_string() == '#004080' -# eq_(c.to_hex_string(), '#004080') + c = mapnik.Color(0, 64, 128, 192) -# c = mapnik.Color(0, 64, 128, 192) + assert c.a == 192 + assert c.r == 0 + assert c.g == 64 + assert c.b == 128 -# eq_(c.a, 192) -# eq_(c.r, 0) -# eq_(c.g, 64) -# eq_(c.b, 128) + assert c.to_hex_string() == '#004080c0' -# eq_(c.to_hex_string(), '#004080c0') +def test_color_equality(): -# def test_color_equality(): + c1 = mapnik.Color('blue') + c2 = mapnik.Color(0,0,255) + c3 = mapnik.Color('black') -# c1 = mapnik.Color('blue') -# c2 = mapnik.Color(0,0,255) -# c3 = mapnik.Color('black') + c3.r = 0 + c3.g = 0 + c3.b = 255 + c3.a = 255 -# c3.r = 0 -# c3.g = 0 -# c3.b = 255 -# c3.a = 255 + assert c1 == c2 + assert c1 == c3 -# eq_(c1, c2) -# eq_(c1, c3) + c1 = mapnik.Color(0, 64, 128) + c2 = mapnik.Color(0, 64, 128) + c3 = mapnik.Color(0, 0, 0) -# c1 = mapnik.Color(0, 64, 128) -# c2 = mapnik.Color(0, 64, 128) -# c3 = mapnik.Color(0, 0, 0) + c3.r = 0 + c3.g = 64 + c3.b = 128 -# c3.r = 0 -# c3.g = 64 -# c3.b = 128 + assert c1 == c2 + assert c1 == c3 -# eq_(c1, c2) -# eq_(c1, c3) + c1 = mapnik.Color(0, 64, 128, 192) + c2 = mapnik.Color(0, 64, 128, 192) + c3 = mapnik.Color(0, 0, 0, 255) -# c1 = mapnik.Color(0, 64, 128, 192) -# c2 = mapnik.Color(0, 64, 128, 192) -# c3 = mapnik.Color(0, 0, 0, 255) + c3.r = 0 + c3.g = 64 + c3.b = 128 + c3.a = 192 -# c3.r = 0 -# c3.g = 64 -# c3.b = 128 -# c3.a = 192 + assert c1 == c2 + assert c1 == c3 -# eq_(c1, c2) -# eq_(c1, c3) + c1 = mapnik.Color('rgb(50%,50%,50%)') + c2 = mapnik.Color(128, 128, 128, 255) + c3 = mapnik.Color('#808080') + c4 = mapnik.Color('gray') -# c1 = mapnik.Color('rgb(50%,50%,50%)') -# c2 = mapnik.Color(128, 128, 128, 255) -# c3 = mapnik.Color('#808080') -# c4 = mapnik.Color('gray') + assert c1 == c2 + assert c1 == c3 + assert c1 == c4 -# 
eq_(c1, c2) -# eq_(c1, c3) -# eq_(c1, c4) + c1 = mapnik.Color('hsl(0, 100%, 50%)') # red + c2 = mapnik.Color('hsl(120, 100%, 50%)') # lime + c3 = mapnik.Color('hsla(240, 100%, 50%, 0.5)') # semi-transparent solid blue -# c1 = mapnik.Color('hsl(0, 100%, 50%)') # red -# c2 = mapnik.Color('hsl(120, 100%, 50%)') # lime -# c3 = mapnik.Color('hsla(240, 100%, 50%, 0.5)') # semi-transparent solid blue + assert c1 == mapnik.Color('red') + assert c2 == mapnik.Color('lime') + assert c3 == mapnik.Color(0, 0, 255, 128) -# eq_(c1, mapnik.Color('red')) -# eq_(c2, mapnik.Color('lime')) -# eq_(c3, mapnik.Color(0,0,255,128)) +def test_rule_init(): + min_scale = 5 + max_scale = 10 -# def test_rule_init(): -# min_scale = 5 -# max_scale = 10 + r = mapnik.Rule() -# r = mapnik.Rule() + assert r.name == '' + assert r.min_scale == 0 + assert r.max_scale == float('inf') + assert r.has_else() == False + assert r.has_also() == False -# eq_(r.name, '') -# eq_(r.min_scale, 0) -# eq_(r.max_scale, float('inf')) -# eq_(r.has_else(), False) -# eq_(r.has_also(), False) + r = mapnik.Rule() -# r = mapnik.Rule() + r.set_else(True) + assert r.has_else() == True + assert r.has_also() == False -# r.set_else(True) -# eq_(r.has_else(), True) -# eq_(r.has_also(), False) + r = mapnik.Rule() -# r = mapnik.Rule() + r.set_also(True) + assert r.has_else() == False + assert r.has_also() == True -# r.set_also(True) -# eq_(r.has_else(), False) -# eq_(r.has_also(), True) + r = mapnik.Rule("Name") -# r = mapnik.Rule("Name") + assert r.name == 'Name' + assert r.min_scale == 0 + assert r.max_scale == float('inf') + assert r.has_else() == False + assert r.has_also() == False -# eq_(r.name, 'Name') -# eq_(r.min_scale, 0) -# eq_(r.max_scale, float('inf')) -# eq_(r.has_else(), False) -# eq_(r.has_also(), False) + r = mapnik.Rule("Name") -# r = mapnik.Rule("Name") + assert r.name == 'Name' + assert r.min_scale == 0 + assert r.max_scale == float('inf') + assert r.has_else() == False + assert r.has_also() == False -# eq_(r.name, 'Name') -# eq_(r.min_scale, 0) -# eq_(r.max_scale, float('inf')) -# eq_(r.has_else(), False) -# eq_(r.has_also(), False) + r = mapnik.Rule("Name", min_scale) -# r = mapnik.Rule("Name", min_scale) + assert r.name == 'Name' + assert r.min_scale == min_scale + assert r.max_scale == float('inf') + assert r.has_else() == False + assert r.has_also() == False -# eq_(r.name, 'Name') -# eq_(r.min_scale, min_scale) -# eq_(r.max_scale, float('inf')) -# eq_(r.has_else(), False) -# eq_(r.has_also(), False) + r = mapnik.Rule("Name", min_scale, max_scale) -# r = mapnik.Rule("Name", min_scale, max_scale) - -# eq_(r.name, 'Name') -# eq_(r.min_scale, min_scale) -# eq_(r.max_scale, max_scale) -# eq_(r.has_else(), False) -# eq_(r.has_also(), False) - -# if __name__ == "__main__": -# setup() -# run_all(eval(x) for x in dir() if x.startswith("test_")) + assert r.name == 'Name' + assert r.min_scale == min_scale + assert r.max_scale == max_scale + assert r.has_else() == False + assert r.has_also() == False diff --git a/test/python_tests/ogr_and_shape_geometries_test.py b/test/python_tests/ogr_and_shape_geometries_test.py index 5c6918e28..1dd2e9219 100644 --- a/test/python_tests/ogr_and_shape_geometries_test.py +++ b/test/python_tests/ogr_and_shape_geometries_test.py @@ -1,43 +1,45 @@ -#!/usr/bin/env python - -from nose.tools import eq_ -from utilities import execution_path, run_all -import os, mapnik +import os +import pytest +import mapnik +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are 
relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield + +try: + import itertools.izip as zip +except ImportError: + pass + # TODO - fix truncation in shapefile... polys = ["POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))", "POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))", "MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))" "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))" - ] + ] plugins = mapnik.DatasourceCache.plugin_names() if 'shape' in plugins and 'ogr' in plugins: def ensure_geometries_are_interpreted_equivalently(filename): - ds1 = mapnik.Ogr(file=filename,layer_by_index=0) + ds1 = mapnik.Ogr(file=filename, layer_by_index=0) ds2 = mapnik.Shapefile(file=filename) - fs1 = ds1.featureset() - fs2 = ds2.featureset() - count = 0; - import itertools - for feat1,feat2 in itertools.izip(fs1, fs2): + fs1 = iter(ds1) + fs2 = iter(ds2) + count = 0 + for feat1, feat2 in zip(fs1, fs2): count += 1 - eq_(feat1.attributes,feat2.attributes) - # TODO - revisit this: https://github.com/mapnik/mapnik/issues/1093 - # eq_(feat1.to_geojson(),feat2.to_geojson()) - #eq_(feat1.geometries().to_wkt(),feat2.geometries().to_wkt()) - #eq_(feat1.geometries().to_wkb(mapnik.wkbByteOrder.NDR),feat2.geometries().to_wkb(mapnik.wkbByteOrder.NDR)) - #eq_(feat1.geometries().to_wkb(mapnik.wkbByteOrder.XDR),feat2.geometries().to_wkb(mapnik.wkbByteOrder.XDR)) - - def test_simple_polys(): - ensure_geometries_are_interpreted_equivalently('../data/shp/wkt_poly.shp') + assert feat1.attributes == feat2.attributes + assert feat1.to_geojson() == feat2.to_geojson() + assert feat1.geometry.to_wkt() == feat2.geometry.to_wkt() + assert feat1.geometry.to_wkb(mapnik.wkbByteOrder.NDR) == feat2.geometry.to_wkb(mapnik.wkbByteOrder.NDR) + assert feat1.geometry.to_wkb(mapnik.wkbByteOrder.XDR) == feat2.geometry.to_wkb(mapnik.wkbByteOrder.XDR) -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + def test_simple_polys(setup): + ensure_geometries_are_interpreted_equivalently( + '../data/shp/wkt_poly.shp') diff --git a/test/python_tests/ogr_test.py b/test/python_tests/ogr_test.py index 905eda2bb..76548d6c4 100644 --- a/test/python_tests/ogr_test.py +++ b/test/python_tests/ogr_test.py @@ -1,157 +1,201 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_,assert_almost_equal,raises -from utilities import execution_path, run_all -import os, mapnik +import os +import mapnik +import pytest try: import json except ImportError: import simplejson as json +from .utilities import execution_path + +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if 'ogr' in mapnik.DatasourceCache.plugin_names(): # Shapefile initialization - def test_shapefile_init(): - ds = mapnik.Ogr(file='../data/shp/boundaries.shp',layer_by_index=0) + def test_shapefile_init(setup): + ds = mapnik.Ogr(file='../data/shp/boundaries.shp', layer_by_index=0) e = ds.envelope() - assert_almost_equal(e.minx, -11121.6896651, places=7) - assert_almost_equal(e.miny, -724724.216526, places=6) - assert_almost_equal(e.maxx, 2463000.67866, places=5) - assert_almost_equal(e.maxy, 1649661.267, places=3) + assert e.minx == pytest.approx(-11121.6896651, abs=1e-7) + assert e.miny == pytest.approx(-724724.216526, 
abs=1e-6) + assert e.maxx == pytest.approx(2463000.67866, abs=1e-5) + assert e.maxy == pytest.approx(1649661.267, abs=1e-3) meta = ds.describe() - eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon) - eq_('+proj=lcc' in meta['proj4'],True) + assert meta['geometry_type'] == mapnik.DataGeometryType.Polygon + assert '+proj=lcc' in meta['proj4'] # Shapefile properties def test_shapefile_properties(): - ds = mapnik.Ogr(file='../data/shp/boundaries.shp',layer_by_index=0) - f = ds.features_at_point(ds.envelope().center(), 0.001).features[0] - eq_(ds.geometry_type(),mapnik.DataGeometryType.Polygon) + ds = mapnik.Ogr(file='../data/shp/boundaries.shp', layer_by_index=0) + f = list(ds.features_at_point(ds.envelope().center(), 0.001))[0] + assert ds.geometry_type() == mapnik.DataGeometryType.Polygon - eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9') - eq_(f['COUNTRY'], u'CAN') - eq_(f['F_CODE'], u'FA001') - eq_(f['NAME_EN'], u'Quebec') - eq_(f['Shape_Area'], 1512185733150.0) - eq_(f['Shape_Leng'], 19218883.724300001) + assert f['CGNS_FID'] == u'6f733341ba2011d892e2080020a0f4c9' + assert f['COUNTRY'] == u'CAN' + assert f['F_CODE'] == u'FA001' + assert f['NAME_EN'] == u'Quebec' + assert f['Shape_Area'] == 1512185733150.0 + assert f['Shape_Leng'] == 19218883.724300001 meta = ds.describe() - eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon) - # NOTE: encoding is latin1 but gdal >= 1.9 should now expose utf8 encoded features - # See SHAPE_ENCODING for overriding: http://gdal.org/ogr/drv_shapefile.html - # Failure for the NOM_FR field is expected for older gdal - #eq_(f['NOM_FR'], u'Qu\xe9bec') - #eq_(f['NOM_FR'], u'Québec') + assert meta['geometry_type'] == mapnik.DataGeometryType.Polygon + assert f['NOM_FR'] == u'Qu\xe9bec' + assert f['NOM_FR'] == u'Québec' - @raises(RuntimeError) def test_that_nonexistant_query_field_throws(**kwargs): - ds = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0) - eq_(len(ds.fields()),11) - eq_(ds.fields(),['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME', 'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT']) - eq_(ds.field_types(),['str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float']) - query = mapnik.Query(ds.envelope()) - for fld in ds.fields(): - query.add_property_name(fld) - # also add an invalid one, triggering throw - query.add_property_name('bogus') - ds.features(query) + with pytest.raises(RuntimeError): + ds = mapnik.Ogr(file='../data/shp/world_merc.shp', layer_by_index=0) + assert len(ds.fields()) == 11 + assert ds.fields() == ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME', + 'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT'] + assert ds.field_types() == ['str','str','str','int','str','int','int','int','int','float','float'] + query = mapnik.Query(ds.envelope()) + for fld in ds.fields(): + query.add_property_name(fld) + # also add an invalid one, triggering throw + query.add_property_name('bogus') + ds.features(query) # disabled because OGR prints an annoying error: ERROR 1: Invalid Point object. Missing 'coordinates' member. 
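The ogr_test.py hunk above shows the nose-to-pytest conversion pattern used throughout this changeset: the module-level `setup()` becomes a module-scoped fixture that yields, `eq_` and `assert_almost_equal` become plain `assert` statements, `@raises` becomes `pytest.raises`, and a `places=N` comparison is approximated with `pytest.approx(..., abs=1e-N)`. A minimal sketch of that pattern follows; the `value()` helper is hypothetical and only stands in for a real computation under test.

```python
import os
import pytest

@pytest.fixture(scope="module")
def setup():
    # chdir once per module, mirroring the converted tests, then hand control back
    old_cwd = os.getcwd()
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    yield
    os.chdir(old_cwd)

def value():
    # hypothetical stand-in for a computed result under test
    return -11121.68966513

def test_value(setup):
    # nose: assert_almost_equal(value(), -11121.6896651, places=7)
    # pytest: an absolute tolerance of 1e-N is a slightly looser stand-in for places=N
    assert value() == pytest.approx(-11121.6896651, abs=1e-7)

def test_error_is_raised():
    # nose: @raises(RuntimeError) decorating the test function
    with pytest.raises(RuntimeError):
        raise RuntimeError("expected failure")
```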
- #def test_handling_of_null_features(): - # ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0) - # fs = ds.all_features() - # eq_(len(fs),1) + # def test_handling_of_null_features(): + # ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0) + # fs = iter(ds) + # assert len(list(fs)) == 1 # OGR plugin extent parameter def test_ogr_extent_parameter(): - ds = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0,extent='-1,-1,1,1') + ds = mapnik.Ogr( + file='../data/shp/world_merc.shp', + layer_by_index=0, + extent='-1,-1,1,1') e = ds.envelope() - eq_(e.minx,-1) - eq_(e.miny,-1) - eq_(e.maxx,1) - eq_(e.maxy,1) + assert e.minx == -1 + assert e.miny == -1 + assert e.maxx == 1 + assert e.maxy == 1 meta = ds.describe() - eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon) - eq_('+proj=merc' in meta['proj4'],True) + assert meta['geometry_type'] == mapnik.DataGeometryType.Polygon + assert '+proj=merc' in meta['proj4'] def test_ogr_reading_gpx_waypoint(): - ds = mapnik.Ogr(file='../data/gpx/empty.gpx',layer='waypoints') + ds = mapnik.Ogr(file='../data/gpx/empty.gpx', layer='waypoints') e = ds.envelope() - eq_(e.minx,-122) - eq_(e.miny,48) - eq_(e.maxx,-122) - eq_(e.maxy,48) + assert e.minx == -122 + assert e.miny == 48 + assert e.maxx == -122 + assert e.maxy == 48 meta = ds.describe() - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) - eq_('+proj=longlat' in meta['proj4'],True) + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + assert '+proj=longlat' in meta['proj4'] def test_ogr_empty_data_should_not_throw(): default_logging_severity = mapnik.logger.get_severity() - mapnik.logger.set_severity(mapnik.severity_type.None) + mapnik.logger.set_severity(getattr(mapnik.severity_type, "None")) # use logger to silence expected warnings for layer in ['routes', 'tracks', 'route_points', 'track_points']: - ds = mapnik.Ogr(file='../data/gpx/empty.gpx',layer=layer) + ds = mapnik.Ogr(file='../data/gpx/empty.gpx', layer=layer) e = ds.envelope() - eq_(e.minx,0) - eq_(e.miny,0) - eq_(e.maxx,0) - eq_(e.maxy,0) + assert e.minx == 0 + assert e.miny == 0 + assert e.maxx == 0 + assert e.maxy == 0 mapnik.logger.set_severity(default_logging_severity) meta = ds.describe() - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) - eq_('+proj=longlat' in meta['proj4'],True) + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + assert '+proj=longlat' in meta['proj4'] # disabled because OGR prints an annoying error: ERROR 1: Invalid Point object. Missing 'coordinates' member. 
- #def test_handling_of_null_features(): - # ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0) - # fs = ds.all_features() - # eq_(len(fs),1) + def test_handling_of_null_features(): + assert True + ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0) + fs = iter(ds) + assert len(list(fs)) == 1 def test_geometry_type(): - ds = mapnik.Ogr(file='../data/csv/wkt.csv',layer_by_index=0) + ds = mapnik.Ogr(file='../data/csv/wkt.csv', layer_by_index=0) e = ds.envelope() - assert_almost_equal(e.minx, 1.0, places=1) - assert_almost_equal(e.miny, 1.0, places=1) - assert_almost_equal(e.maxx, 45.0, places=1) - assert_almost_equal(e.maxy, 45.0, places=1) + assert e.minx == pytest.approx(1.0, abs=1e-1) + assert e.miny == pytest.approx(1.0, abs=1e-1) + assert e.maxx == pytest.approx(45.0, abs=1e-1) + assert e.maxy == pytest.approx(45.0, abs=1e-1) meta = ds.describe() - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) - #eq_('+proj=longlat' in meta['proj4'],True) - fs = ds.featureset() - feat = fs.next() + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + fs = iter(ds) + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'Point', u'coordinates': [30, 10]}, u'type': u'Feature', u'id': 2, u'properties': {u'type': u'point', u'WKT': u' POINT (30 10)'}}) - feat = fs.next() + assert actual == {u'geometry': {u'type': u'Point', + u'coordinates': [30,10]}, + u'type': u'Feature', + u'id': 2, + u'properties': {u'type': u'point', + u'WKT': u'POINT (30 10)'}} + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'LineString', u'coordinates': [[30, 10], [10, 30], [40, 40]]}, u'type': u'Feature', u'id': 3, u'properties': {u'type': u'linestring', u'WKT': u' LINESTRING (30 10, 10 30, 40 40)'}}) - feat = fs.next() + assert actual == {u'geometry': {u'type': u'LineString', + u'coordinates': [[30,10],[10,30],[40,40]]}, + u'type': u'Feature', + u'id': 3, + u'properties': {u'type': u'linestring', + u'WKT': u'LINESTRING (30 10, 10 30, 40 40)'}} + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'Polygon', u'coordinates': [[[30, 10], [40, 40], [20, 40], [10, 20], [30, 10]]]}, u'type': u'Feature', u'id': 4, u'properties': {u'type': u'polygon', u'WKT': u' POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))'}}) - feat = fs.next() + assert actual == {u'geometry': {u'type': u'Polygon', u'coordinates': [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}, + u'type': u'Feature', + u'id': 4, + u'properties': {u'type': u'polygon', + u'WKT': u'POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))'}} + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'Polygon', u'coordinates': [[[35, 10], [45, 45], [15, 40], [10, 20], [35, 10]], [[20, 30], [35, 35], [30, 20], [20, 30]]]}, u'type': u'Feature', u'id': 5, u'properties': {u'type': u'polygon', u'WKT': u' POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'}}) - feat = fs.next() + assert actual == {u'geometry': {u'type': u'Polygon', u'coordinates': [[[35, 10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}, + u'type': u'Feature', + u'id': 5, + u'properties': { u'type': u'polygon', u'WKT': u'POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'}} + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'MultiPoint', u'coordinates': [[10, 40], [40, 30], [20, 20], [30, 10]]}, u'type': 
u'Feature', u'id': 6, u'properties': {u'type': u'multipoint', u'WKT': u' MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'}}) - feat = fs.next() + assert actual == {u'geometry': {u'type': u'MultiPoint', + u'coordinates': [[10,40],[40,30],[20,20],[30,10]]}, + u'type': u'Feature', + u'id': 6, + u'properties': {u'type': u'multipoint', + u'WKT': u'MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'}} + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'MultiLineString', u'coordinates': [[[10, 10], [20, 20], [10, 40]], [[40, 40], [30, 30], [40, 20], [30, 10]]]}, u'type': u'Feature', u'id': 7, u'properties': {u'type': u'multilinestring', u'WKT': u' MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))'}}) - feat = fs.next() + assert actual == {u'geometry': {u'type': u'MultiLineString', + u'coordinates': [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}, + u'type': u'Feature', + u'id': 7, + u'properties': {u'type': u'multilinestring', + u'WKT': u'MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))'}} + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'MultiPolygon', u'coordinates': [[[[30, 20], [45, 40], [10, 40], [30, 20]]], [[[15, 5], [40, 10], [10, 20], [5, 10], [15, 5]]]]}, u'type': u'Feature', u'id': 8, u'properties': {u'type': u'multipolygon', u'WKT': u' MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))'}}) - feat = fs.next() + assert actual == {u'geometry': {u'type': u'MultiPolygon', + u'coordinates': [[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}, + u'type': u'Feature', + u'id': 8, + u'properties': {u'type': u'multipolygon', + u'WKT': u'MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))'}} + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'MultiPolygon', u'coordinates': [[[[40, 40], [20, 45], [45, 30], [40, 40]]], [[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]], [[30, 20], [20, 15], [20, 25], [30, 20]]]]}, u'type': u'Feature', u'id': 9, u'properties': {u'type': u'multipolygon', u'WKT': u' MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))'}}) - feat = fs.next() + assert actual == {u'geometry': {u'type': u'MultiPolygon', + u'coordinates': [[[[40, 40], [20, 45], [45, 30], [40, 40]]], [[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]], [[30, 20], [20, 15], [20, 25], [30, 20]]]]}, + u'type': u'Feature', + u'id': 9, + u'properties': {u'type': u'multipolygon', u'WKT': u'MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))'}} + feat = next(fs) actual = json.loads(feat.to_geojson()) - eq_(actual,{u'geometry': {u'type': u'GeometryCollection', u'geometries': [{u'type': u'Polygon', u'coordinates': [[[1, 1], [2, 1], [2, 2], [1, 2], [1, 1]]]}, {u'type': u'Point', u'coordinates': [2, 3]}, {u'type': u'LineString', u'coordinates': [[2, 3], [3, 4]]}]}, u'type': u'Feature', u'id': 10, u'properties': {u'type': u'collection', u'WKT': u' GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))'}}) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert actual == {u'geometry': {u'type': u'GeometryCollection', + u'geometries': [{u'type': u'Polygon', + u'coordinates': [[[1, 1],[2,1],[2,2],[1,2],[1,1]]]}, + 
{u'type': u'Point', + u'coordinates': [2,3]}, + {u'type': u'LineString', + u'coordinates': [[2,3],[3,4]]}]}, + u'type': u'Feature', + u'id': 10, + u'properties': {u'type': u'collection', + u'WKT': u'GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))'}} diff --git a/test/python_tests/osm_test.py b/test/python_tests/osm_test.py deleted file mode 100644 index b9f519668..000000000 --- a/test/python_tests/osm_test.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_ -from utilities import execution_path, run_all -import os, mapnik - -def setup(): - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) - -if 'osm' in mapnik.DatasourceCache.plugin_names(): - - # osm initialization - def test_osm_init(): - ds = mapnik.Osm(file='../data/osm/nodes.osm') - - e = ds.envelope() - - # these are hardcoded in the plugin… ugh - eq_(e.minx >= -180.0,True) - eq_(e.miny >= -90.0,True) - eq_(e.maxx <= 180.0,True) - eq_(e.maxy <= 90,True) - - def test_that_nonexistant_query_field_throws(**kwargs): - ds = mapnik.Osm(file='../data/osm/nodes.osm') - eq_(len(ds.fields()),0) - query = mapnik.Query(ds.envelope()) - for fld in ds.fields(): - query.add_property_name(fld) - # also add an invalid one, triggering throw - query.add_property_name('bogus') - ds.features(query) - - def test_that_64bit_int_fields_work(): - ds = mapnik.Osm(file='../data/osm/64bit.osm') - eq_(len(ds.fields()),4) - eq_(ds.fields(),['bigint', 'highway', 'junction', 'note']) - eq_(ds.field_types(),['str', 'str', 'str', 'str']) - fs = ds.featureset() - feat = fs.next() - eq_(feat.to_geojson(),'{"type":"Feature","id":890,"geometry":{"type":"Point","coordinates":[-61.7960248,17.1415874]},"properties":{}}') - eq_(feat.id(),4294968186) - eq_(feat['bigint'], None) - feat = fs.next() - eq_(feat['bigint'],'9223372036854775807') - - def test_reading_ways(): - ds = mapnik.Osm(file='../data/osm/ways.osm') - eq_(len(ds.fields()),0) - eq_(ds.fields(),[]) - eq_(ds.field_types(),[]) - feat = ds.all_features()[4] - eq_(feat.to_geojson(),'{"type":"Feature","id":1,"geometry":{"type":"LineString","coordinates":[[0,2],[0,-2]]},"properties":{}}') - eq_(feat.id(),1) - - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) diff --git a/test/python_tests/palette_test.py b/test/python_tests/palette_test.py index 9b308953f..98dbb6623 100644 --- a/test/python_tests/palette_test.py +++ b/test/python_tests/palette_test.py @@ -1,14 +1,14 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_ -from utilities import execution_path, run_all -import os, mapnik +import sys, os +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield expected_64 = '[Palette 64 colors #494746 #c37631 #89827c #d1955c #7397b9 #fc9237 #a09f9c #fbc147 #9bb3ce #b7c9a1 #b5d29c #c4b9aa #cdc4a5 #d5c8a3 #c1d7aa #ccc4b6 #dbd19c #b2c4d5 #eae487 #c9c8c6 #e4db99 #c9dcb5 #dfd3ac #cbd2c2 #d6cdbc #dbd2b6 #c0ceda #ece597 #f7ef86 #d7d3c3 #dfcbc3 #d1d0cd #d1e2bf #d3dec1 #dbd3c4 #e6d8b6 #f4ef91 #d3d3cf #cad5de #ded7c9 #dfdbce #fcf993 #ffff8a #dbd9d7 #dbe7cd #d4dce2 #e4ded3 #ebe3c9 #e0e2e2 #f4edc3 #fdfcae #e9e5dc #f4edda #eeebe4 #fefdc5 #e7edf2 #edf4e5 #f2efe9 #f6ede7 #fefedd #f6f4f0 
#f1f5f8 #fbfaf8 #ffffff]' @@ -16,39 +16,41 @@ def setup(): expected_rgb = '[Palette 2 colors #ff00ff #ffffff]' -def test_reading_palettes(): - act = open('../data/palettes/palette64.act','rb') - palette = mapnik.Palette(act.read(),'act') - eq_(palette.to_string(),expected_64); - act = open('../data/palettes/palette256.act','rb') - palette = mapnik.Palette(act.read(),'act') - eq_(palette.to_string(),expected_256); - palette = mapnik.Palette('\xff\x00\xff\xff\xff\xff', 'rgb') - eq_(palette.to_string(),expected_rgb); + +def test_reading_palettes(setup): + with open('../data/palettes/palette64.act', 'rb') as act: + palette = mapnik.Palette(act.read(), 'act') + assert palette.to_string() == expected_64 + with open('../data/palettes/palette256.act', 'rb') as act: + palette = mapnik.Palette(act.read(), 'act') + assert palette.to_string() == expected_256 + palette = mapnik.Palette(b'\xff\x00\xff\xff\xff\xff', 'rgb') + assert palette.to_string() == expected_rgb if 'shape' in mapnik.DatasourceCache.plugin_names(): def test_render_with_palette(): - m = mapnik.Map(600,400) - mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml') + m = mapnik.Map(600, 400) + mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml') m.zoom_all() - im = mapnik.Image(m.width,m.height) - mapnik.render(m,im) - act = open('../data/palettes/palette256.act','rb') - palette = mapnik.Palette(act.read(),'act') + im = mapnik.Image(m.width, m.height) + mapnik.render(m, im) + with open('../data/palettes/palette256.act', 'rb') as act: + palette = mapnik.Palette(act.read(), 'act') # test saving directly to filesystem - im.save('/tmp/mapnik-palette-test.png','png',palette) - expected = './images/support/mapnik-palette-test.png' + im.save('/tmp/mapnik-palette-test.png', 'png', palette) + expected = 'images/support/mapnik-palette-test.png' if os.environ.get('UPDATE'): - im.save(expected,"png",palette); + im.save(expected, "png", palette) # test saving to a string - open('/tmp/mapnik-palette-test2.png','wb').write(im.tostring('png',palette)); + with open('/tmp/mapnik-palette-test2.png', 'wb') as f: + f.write(im.to_string('png', palette)) # compare the two methods - eq_(mapnik.Image.open('/tmp/mapnik-palette-test.png').tostring('png32'),mapnik.Image.open('/tmp/mapnik-palette-test2.png').tostring('png32'),'%s not eq to %s' % ('/tmp/mapnik-palette-test.png','/tmp/mapnik-palette-test2.png')) + im1 = mapnik.Image.open('/tmp/mapnik-palette-test.png') + im2 = mapnik.Image.open('/tmp/mapnik-palette-test2.png') + assert im1.to_string('png32') == im2.to_string('png32'), '%s not eq to %s' % ('/tmp/mapnik-palette-test.png', + '/tmp/mapnik-palette-test2.png') # compare to expected - eq_(mapnik.Image.open('/tmp/mapnik-palette-test.png').tostring('png32'),mapnik.Image.open(expected).tostring('png32'),'%s not eq to %s' % ('/tmp/mapnik-palette-test.png',expected)) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert im1.to_string('png32') == mapnik.Image.open(expected).to_string('png32'), '%s not eq to %s' % ('/tmp/mapnik-palette-test.png', + expected) diff --git a/test/python_tests/parameters_test.py b/test/python_tests/parameters_test.py index 1587fbdde..66507f044 100644 --- a/test/python_tests/parameters_test.py +++ b/test/python_tests/parameters_test.py @@ -1,61 +1,46 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import os import sys -from nose.tools import eq_ -from utilities import execution_path, run_all import mapnik -def setup(): - os.chdir(execution_path('.')) - def 
test_parameter_null(): - p = mapnik.Parameter('key',None) - eq_(p[0],'key') - eq_(p[1],None) + p = mapnik.Parameters() + p['key'] = None + assert p['key'] is None + def test_parameter_string(): - p = mapnik.Parameter('key','value') - eq_(p[0],'key') - eq_(p[1],'value') + p = mapnik.Parameters() + p['key'] = 'value' + assert p['key'] == 'value' + def test_parameter_unicode(): - p = mapnik.Parameter('key',u'value') - eq_(p[0],'key') - eq_(p[1],u'value') + p = mapnik.Parameters() + p['key'] = u'value' + assert p['key'] == u'value' + def test_parameter_integer(): - p = mapnik.Parameter('int',sys.maxint) - eq_(p[0],'int') - eq_(p[1],sys.maxint) + p = mapnik.Parameters() + p['int'] = sys.maxsize + assert p['int'] == sys.maxsize + def test_parameter_double(): - p = mapnik.Parameter('double',float(sys.maxint)) - eq_(p[0],'double') - eq_(p[1],float(sys.maxint)) + p = mapnik.Parameters() + p['double'] = float(sys.maxsize) + assert p['double'] == float(sys.maxsize) + def test_parameter_boolean(): - p = mapnik.Parameter('boolean',True) - eq_(p[0],'boolean') - eq_(p[1],True) - eq_(bool(p[1]),True) + p = mapnik.Parameters() + p['boolean'] = True + assert p['boolean'] == True + assert bool(p['boolean']) == True def test_parameters(): - params = mapnik.Parameters() - p = mapnik.Parameter('float',1.0777) - eq_(p[0],'float') - eq_(p[1],1.0777) - - params.append(p) - - eq_(params[0][0],'float') - eq_(params[0][1],1.0777) - - eq_(params.get('float'),1.0777) - - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + p = mapnik.Parameters() + p['float'] = 1.0777 + p['int'] = 123456789 + assert p['float'] == 1.0777 + assert p['int'] == 123456789 diff --git a/test/python_tests/pdf_printing_test.py b/test/python_tests/pdf_printing_test.py new file mode 100644 index 000000000..3240231c0 --- /dev/null +++ b/test/python_tests/pdf_printing_test.py @@ -0,0 +1,50 @@ +import mapnik +import os +import pytest +from .utilities import execution_path + +@pytest.fixture(scope="module") +def setup(): + # All of the paths used are relative, if we run the tests + # from another directory we need to chdir() + os.chdir(execution_path('.')) + yield + +def make_map_from_xml(source_xml): + m = mapnik.Map(100, 100) + mapnik.load_map(m, source_xml, True) + m.zoom_all() + return m + +def make_pdf(m, output_pdf, esri_wkt): + # renders a PDF with a grid and a legend + page = mapnik.printing.PDFPrinter(use_ocg_layers=True) + + page.render_map(m, output_pdf) + page.render_grid_on_map(m) + page.render_legend(m) + + page.finish() + page.add_geospatial_pdf_header(m, output_pdf, wkt=esri_wkt) + +if mapnik.has_pycairo(): + import mapnik.printing + + def test_pdf_printing(setup): + source_xml = '../data/good_maps/marker-text-line.xml'.encode('utf-8') + m = make_map_from_xml(source_xml) + + actual_pdf = "/tmp/pdf-printing-actual.pdf" + esri_wkt = 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]' + make_pdf(m, actual_pdf, esri_wkt) + + expected_pdf = 'images/pycairo/pdf-printing-expected.pdf' + + diff = abs(os.stat(expected_pdf).st_size - os.stat(actual_pdf).st_size) + msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff, actual_pdf, 'tests/python_tests/' + expected_pdf) + assert diff < 1500, msg + +# TODO: ideas for further testing on printing module +# - test with and without pangocairo +# - test legend with attribution +# - test graticule (bug at the moment) diff --git 
a/test/python_tests/pgraster_test.py b/test/python_tests/pgraster_test.py index dc7584f4f..e60e71d8e 100644 --- a/test/python_tests/pgraster_test.py +++ b/test/python_tests/pgraster_test.py @@ -1,39 +1,44 @@ -#!/usr/bin/env python - -from nose.tools import eq_,assert_almost_equal import atexit -import time -from utilities import execution_path, run_all, side_by_side_image -from subprocess import Popen, PIPE -import os, mapnik -import sys +import os import re +import sys +import time from binascii import hexlify +from subprocess import PIPE, Popen +import mapnik +import pytest +from .utilities import execution_path, side_by_side_image MAPNIK_TEST_DBNAME = 'mapnik-tmp-pgraster-test-db' POSTGIS_TEMPLATE_DBNAME = 'template_postgis' -DEBUG_OUTPUT=False +DEBUG_OUTPUT = False + def log(msg): if DEBUG_OUTPUT: - print msg + print(msg) +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) -def call(cmd,silent=False): - stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate() +def call(cmd, silent=False): + stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, + stderr=PIPE).communicate() + stdin = stdin.decode() + stderr = stderr.decode() if not stderr: return stdin.strip() elif not silent and 'error' in stderr.lower() \ - or 'not found' in stderr.lower() \ - or 'could not connect' in stderr.lower() \ - or 'bad connection' in stderr.lower() \ - or 'not recognized as an internal' in stderr.lower(): + or 'not found' in stderr.lower() \ + or 'could not connect' in stderr.lower() \ + or 'bad connection' in stderr.lower() \ + or 'not recognized as an internal' in stderr.lower(): raise RuntimeError(stderr.strip()) + def psql_can_connect(): """Test ability to connect to a postgis template db with no options. @@ -46,14 +51,16 @@ def psql_can_connect(): call('psql %s -c "select postgis_version()"' % POSTGIS_TEMPLATE_DBNAME) return True except RuntimeError: - print 'Notice: skipping pgraster tests (connection)' + print('Notice: skipping pgraster tests (connection)') return False + def psql_run(cmd): - cmd = 'psql --set ON_ERROR_STOP=1 %s -c "%s"' % \ - (MAPNIK_TEST_DBNAME, cmd.replace('"', '\\"')) - log('DEBUG: running ' + cmd) - call(cmd) + cmd = 'psql --set ON_ERROR_STOP=1 %s -c "%s"' % \ + (MAPNIK_TEST_DBNAME, cmd.replace('"', '\\"')) + log('DEBUG: running ' + cmd) + call(cmd) + def raster2pgsql_on_path(): """Test for presence of raster2pgsql on the user path. @@ -64,9 +71,10 @@ def raster2pgsql_on_path(): call('raster2pgsql') return True except RuntimeError: - print 'Notice: skipping pgraster tests (raster2pgsql)' + print('Notice: skipping pgraster tests (raster2pgsql)') return False + def createdb_and_dropdb_on_path(): """Test for presence of dropdb/createdb on user path. 
@@ -77,687 +85,744 @@ def createdb_and_dropdb_on_path(): call('dropdb --help') return True except RuntimeError: - print 'Notice: skipping pgraster tests (createdb/dropdb)' + print('Notice: skipping pgraster tests (createdb/dropdb)') return False + def postgis_setup(): - call('dropdb %s' % MAPNIK_TEST_DBNAME,silent=True) - call('createdb -T %s %s' % (POSTGIS_TEMPLATE_DBNAME,MAPNIK_TEST_DBNAME),silent=False) + call('dropdb %s' % MAPNIK_TEST_DBNAME, silent=True) + call( + 'createdb -T %s %s' % + (POSTGIS_TEMPLATE_DBNAME, + MAPNIK_TEST_DBNAME), + silent=False) + def postgis_takedown(): pass # fails as the db is in use: https://github.com/mapnik/mapnik/issues/960 #call('dropdb %s' % MAPNIK_TEST_DBNAME) + def import_raster(filename, tabname, tilesize, constraint, overview): - log('tile: ' + tilesize + ' constraints: ' + str(constraint) \ - + ' overviews: ' + overview) - cmd = 'raster2pgsql -Y -I -q' - if constraint: - cmd += ' -C' - if tilesize: - cmd += ' -t ' + tilesize - if overview: - cmd += ' -l ' + overview - cmd += ' %s %s | psql --set ON_ERROR_STOP=1 -q %s' % (os.path.abspath(os.path.normpath(filename)),tabname,MAPNIK_TEST_DBNAME) - log('Import call: ' + cmd) - call(cmd) + log('tile: ' + tilesize + ' constraints: ' + str(constraint) + + ' overviews: ' + overview) + cmd = 'raster2pgsql -Y -I -q' + if constraint: + cmd += ' -C' + if tilesize: + cmd += ' -t ' + tilesize + if overview: + cmd += ' -l ' + overview + cmd += ' %s %s | psql --set ON_ERROR_STOP=1 -q %s' % ( + os.path.abspath(os.path.normpath(filename)), tabname, MAPNIK_TEST_DBNAME) + log('Import call: ' + cmd) + call(cmd) + def drop_imported(tabname, overview): - psql_run('DROP TABLE IF EXISTS "' + tabname + '";') - if overview: - for of in overview.split(','): - psql_run('DROP TABLE IF EXISTS "o_' + of + '_' + tabname + '";') - -def compare_images(expected,im): - expected = os.path.join(os.path.dirname(expected),os.path.basename(expected).replace(':','_')) - if not os.path.exists(expected) or os.environ.get('UPDATE'): - print 'generating expected image %s' % expected - im.save(expected,'png32') - expected_im = mapnik.Image.open(expected) - diff = expected.replace('.png','-diff.png') - if len(im.tostring("png32")) != len(expected_im.tostring("png32")): - compared = side_by_side_image(expected_im, im) - compared.save(diff) - assert False,'images do not match, check diff at %s' % diff - else: - if os.path.exists(diff): os.unlink(diff) - return True + psql_run('DROP TABLE IF EXISTS "' + tabname + '";') + if overview: + for of in overview.split(','): + psql_run('DROP TABLE IF EXISTS "o_' + of + '_' + tabname + '";') + + +def compare_images(expected, im): + cur_dir = os.path.dirname(os.path.realpath(__file__)) + expected = os.path.join( + os.path.dirname(expected), + os.path.basename(expected).replace( + ':', + '_')) + expected = os.path.join(cur_dir, expected) + if not os.path.exists(expected) or os.environ.get('UPDATE'): + print('generating expected image %s' % expected) + im.save(expected, 'png32') + expected_im = mapnik.Image.open(expected) + diff = expected.replace('.png', '-diff.png') + if len(im.to_string("png32")) != len(expected_im.to_string("png32")): + compared = side_by_side_image(expected_im, im) + compared.save(diff) + assert False, 'images do not match, check diff at %s' % diff + else: + if os.path.exists(diff): + os.unlink(diff) + return True if 'pgraster' in mapnik.DatasourceCache.plugin_names() \ and createdb_and_dropdb_on_path() \ and psql_can_connect() \ and raster2pgsql_on_path(): - # initialize test database + # 
initialize test database postgis_setup() # [old]dataraster.tif, 2283x1913 int16 single-band # dataraster-small.tif, 457x383 int16 single-band def _test_dataraster_16bsi_rendering(lbl, overview, rescale, clip): - if rescale: - lbl += ' Sc' - if clip: - lbl += ' Cl' - ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME,table='"dataRaster"', - band=1,use_overviews=1 if overview else 0, - prescale_rasters=rescale,clip_rasters=clip) - fs = ds.featureset() - feature = fs.next() - eq_(feature['rid'],1) - lyr = mapnik.Layer('dataraster_16bsi') - lyr.datasource = ds - expenv = mapnik.Box2d(-14637, 3903178, 1126863, 4859678) - env = lyr.envelope() - # As the input size is a prime number both horizontally - # and vertically, we expect the extent of the overview - # tables to be a pixel wider than the original, whereas - # the pixel size in geographical units depends on the - # overview factor. So we start with the original pixel size - # as base scale and multiply by the overview factor. - # NOTE: the overview table extent only grows north and east - pixsize = 500 # see gdalinfo dataraster.tif - pixsize = 2497 # see gdalinfo dataraster-small.tif - tol = pixsize * max(overview.split(',')) if overview else 0 - assert_almost_equal(env.minx, expenv.minx) - assert_almost_equal(env.miny, expenv.miny, delta=tol) - assert_almost_equal(env.maxx, expenv.maxx, delta=tol) - assert_almost_equal(env.maxy, expenv.maxy) - mm = mapnik.Map(256, 256) - style = mapnik.Style() - col = mapnik.RasterColorizer(); - col.default_mode = mapnik.COLORIZER_DISCRETE; - col.add_stop(0, mapnik.Color(0x40,0x40,0x40,255)); - col.add_stop(10, mapnik.Color(0x80,0x80,0x80,255)); - col.add_stop(20, mapnik.Color(0xa0,0xa0,0xa0,255)); - sym = mapnik.RasterSymbolizer() - sym.colorizer = col - rule = mapnik.Rule() - rule.symbols.append(sym) - style.rules.append(rule) - mm.append_style('foo', style) - lyr.styles.append('foo') - mm.layers.append(lyr) - mm.zoom_to_box(expenv) - im = mapnik.Image(mm.width, mm.height) - t0 = time.time() # we want wall time to include IO waits - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:full') - # no data - eq_(im.view(1,1,1,1).tostring(), '\x00\x00\x00\x00') - eq_(im.view(255,255,1,1).tostring(), '\x00\x00\x00\x00') - eq_(im.view(195,116,1,1).tostring(), '\x00\x00\x00\x00') - # A0A0A0 - eq_(im.view(100,120,1,1).tostring(), '\xa0\xa0\xa0\xff') - eq_(im.view( 75, 80,1,1).tostring(), '\xa0\xa0\xa0\xff') - # 808080 - eq_(im.view( 74,170,1,1).tostring(), '\x80\x80\x80\xff') - eq_(im.view( 30, 50,1,1).tostring(), '\x80\x80\x80\xff') - # 404040 - eq_(im.view(190, 70,1,1).tostring(), '\x40\x40\x40\xff') - eq_(im.view(140,170,1,1).tostring(), '\x40\x40\x40\xff') - - # Now zoom over a portion of the env (1/10) - newenv = mapnik.Box2d(273663,4024478,330738,4072303) - mm.zoom_to_box(newenv) - t0 = time.time() # we want wall time to include IO waits - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10') - # nodata - eq_(hexlify(im.view(255,255,1,1).tostring()), '00000000') - eq_(hexlify(im.view(200,254,1,1).tostring()), '00000000') - # A0A0A0 - eq_(hexlify(im.view(90,232,1,1).tostring()), 'a0a0a0ff') - eq_(hexlify(im.view(96,245,1,1).tostring()), 'a0a0a0ff') - # 808080 - eq_(hexlify(im.view(1,1,1,1).tostring()), '808080ff') - eq_(hexlify(im.view(128,128,1,1).tostring()), '808080ff') - # 404040 - eq_(hexlify(im.view(255, 0,1,1).tostring()), '404040ff') + if rescale: + lbl += ' Sc' + if clip: + lbl += ' Cl' + ds = 
mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table='"dataRaster"', + band=1, use_overviews=1 if overview else 0, + prescale_rasters=rescale, clip_rasters=clip) + fs = iter(ds) + feature = next(fs) + assert feature['rid'] == 1 + lyr = mapnik.Layer('dataraster_16bsi') + lyr.datasource = ds + expenv = mapnik.Box2d(-14637, 3903178, 1126863, 4859678) + env = lyr.envelope() + # As the input size is a prime number both horizontally + # and vertically, we expect the extent of the overview + # tables to be a pixel wider than the original, whereas + # the pixel size in geographical units depends on the + # overview factor. So we start with the original pixel size + # as base scale and multiply by the overview factor. + # NOTE: the overview table extent only grows north and east + pixsize = 500 # see gdalinfo dataraster.tif + pixsize = 2497 # see gdalinfo dataraster-small.tif + tol = pixsize * max(overview.split(',')) if overview else 0 + assert env.minx == expenv.minx + assert env.miny == pytest.approx(expenv.miny,1.0e-7) + assert env.maxx == pytest.approx(expenv.maxx,1.0e-7) + assert env.maxy == expenv.maxy + mm = mapnik.Map(256, 256) + style = mapnik.Style() + col = mapnik.RasterColorizer() + col.default_mode = mapnik.COLORIZER_DISCRETE + col.add_stop(0, mapnik.Color(0x40, 0x40, 0x40, 255)) + col.add_stop(10, mapnik.Color(0x80, 0x80, 0x80, 255)) + col.add_stop(20, mapnik.Color(0xa0, 0xa0, 0xa0, 255)) + sym = mapnik.RasterSymbolizer() + sym.colorizer = col + rule = mapnik.Rule() + rule.symbolizers.append(sym) + style.rules.append(rule) + mm.append_style('foo', style) + lyr.styles.append('foo') + mm.layers.append(lyr) + mm.zoom_to_box(expenv) + im = mapnik.Image(mm.width, mm.height) + t0 = time.time() # we want wall time to include IO waits + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:full') + # no data + assert im.view(1, 1, 1, 1).to_string() == b'\x00\x00\x00\x00' + assert im.view(255, 255, 1, 1).to_string() == b'\x00\x00\x00\x00' + assert im.view(195, 116, 1, 1).to_string() == b'\x00\x00\x00\x00' + # A0A0A0 + assert im.view(100, 120, 1, 1).to_string() == b'\xa0\xa0\xa0\xff' + assert im.view(75, 80, 1, 1).to_string() == b'\xa0\xa0\xa0\xff' + # 808080 + assert im.view(74, 170, 1, 1).to_string() == b'\x80\x80\x80\xff' + assert im.view(30, 50, 1, 1).to_string() == b'\x80\x80\x80\xff' + # 404040 + assert im.view(190, 70, 1, 1).to_string() == b'\x40\x40\x40\xff' + assert im.view(140, 170, 1, 1).to_string() == b'\x40\x40\x40\xff' + + # Now zoom over a portion of the env (1/10) + newenv = mapnik.Box2d(273663, 4024478, 330738, 4072303) + mm.zoom_to_box(newenv) + t0 = time.time() # we want wall time to include IO waits + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10') + # nodata + assert hexlify(im.view(255, 255, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(200, 254, 1, 1).to_string()) == b'00000000' + # A0A0A0 + assert hexlify(im.view(90, 232, 1, 1).to_string()) == b'a0a0a0ff' + assert hexlify(im.view(96, 245, 1, 1).to_string()) == b'a0a0a0ff' + # 808080 + assert hexlify(im.view(1, 1, 1, 1).to_string()) == b'808080ff' + assert hexlify(im.view(128, 128, 1, 1).to_string()) == b'808080ff' + # 404040 + assert hexlify(im.view(255, 0, 1, 1).to_string()) == b'404040ff' def _test_dataraster_16bsi(lbl, tilesize, constraint, overview): - import_raster('../data/raster/dataraster-small.tif', 'dataRaster', tilesize, constraint, overview) - if constraint: - lbl += ' C' - if tilesize: - lbl += ' T:' + tilesize - 
if overview: - lbl += ' O:' + overview - for prescale in [0,1]: - for clip in [0,1]: - _test_dataraster_16bsi_rendering(lbl, overview, prescale, clip) - drop_imported('dataRaster', overview) + import_raster( + os.path.join(os.path.dirname(os.path.realpath(__file__)),'../../test/data/raster/dataraster-small.tif'), + 'dataRaster', + tilesize, + constraint, + overview) + if constraint: + lbl += ' C' + if tilesize: + lbl += ' T:' + tilesize + if overview: + lbl += ' O:' + overview + for prescale in [0, 1]: + for clip in [0, 1]: + _test_dataraster_16bsi_rendering(lbl, overview, prescale, clip) + drop_imported('dataRaster', overview) def test_dataraster_16bsi(): - #for tilesize in ['','256x256']: - for tilesize in ['256x256']: - for constraint in [0,1]: - #for overview in ['','4','2,16']: - for overview in ['','2']: - _test_dataraster_16bsi('data_16bsi', tilesize, constraint, overview) - - # river.tiff, RGBA 8BUI + # for tilesize in ['','256x256']: + for tilesize in ['256x256']: + for constraint in [0, 1]: + # for overview in ['','4','2,16']: + for overview in ['', '2']: + _test_dataraster_16bsi( + 'data_16bsi', tilesize, constraint, overview) + + # # river.tiff, RGBA 8BUI def _test_rgba_8bui_rendering(lbl, overview, rescale, clip): - if rescale: - lbl += ' Sc' - if clip: - lbl += ' Cl' - ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME,table='(select * from "River") foo', - use_overviews=1 if overview else 0, - prescale_rasters=rescale,clip_rasters=clip) - fs = ds.featureset() - feature = fs.next() - eq_(feature['rid'],1) - lyr = mapnik.Layer('rgba_8bui') - lyr.datasource = ds - expenv = mapnik.Box2d(0, -210, 256, 0) - env = lyr.envelope() - # As the input size is a prime number both horizontally - # and vertically, we expect the extent of the overview - # tables to be a pixel wider than the original, whereas - # the pixel size in geographical units depends on the - # overview factor. So we start with the original pixel size - # as base scale and multiply by the overview factor. 
- # NOTE: the overview table extent only grows north and east - pixsize = 1 # see gdalinfo river.tif - tol = pixsize * max(overview.split(',')) if overview else 0 - assert_almost_equal(env.minx, expenv.minx) - assert_almost_equal(env.miny, expenv.miny, delta=tol) - assert_almost_equal(env.maxx, expenv.maxx, delta=tol) - assert_almost_equal(env.maxy, expenv.maxy) - mm = mapnik.Map(256, 256) - style = mapnik.Style() - sym = mapnik.RasterSymbolizer() - rule = mapnik.Rule() - rule.symbols.append(sym) - style.rules.append(rule) - mm.append_style('foo', style) - lyr.styles.append('foo') - mm.layers.append(lyr) - mm.zoom_to_box(expenv) - im = mapnik.Image(mm.width, mm.height) - t0 = time.time() # we want wall time to include IO waits - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:full') - expected = 'images/support/pgraster/%s-%s-%s-%s-box1.png' % (lyr.name,lbl,overview,clip) - compare_images(expected,im) - # no data - eq_(hexlify(im.view(3,3,1,1).tostring()), '00000000') - eq_(hexlify(im.view(250,250,1,1).tostring()), '00000000') - # full opaque river color - eq_(hexlify(im.view(175,118,1,1).tostring()), 'b9d8f8ff') - # half-transparent pixel - pxstr = hexlify(im.view(122,138,1,1).tostring()) - apat = ".*(..)$" - match = re.match(apat, pxstr) - assert match, 'pixel ' + pxstr + ' does not match pattern ' + apat - alpha = match.group(1) - assert alpha != 'ff' and alpha != '00', \ - 'unexpected full transparent/opaque pixel: ' + alpha - - # Now zoom over a portion of the env (1/10) - newenv = mapnik.Box2d(166,-105,191,-77) - mm.zoom_to_box(newenv) - t0 = time.time() # we want wall time to include IO waits - im = mapnik.Image(mm.width, mm.height) - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10') - expected = 'images/support/pgraster/%s-%s-%s-%s-box2.png' % (lyr.name,lbl,overview,clip) - compare_images(expected,im) - # no data - eq_(hexlify(im.view(255,255,1,1).tostring()), '00000000') - eq_(hexlify(im.view(200,40,1,1).tostring()), '00000000') - # full opaque river color - eq_(hexlify(im.view(100,168,1,1).tostring()), 'b9d8f8ff') - # half-transparent pixel - pxstr = hexlify(im.view(122,138,1,1).tostring()) - apat = ".*(..)$" - match = re.match(apat, pxstr) - assert match, 'pixel ' + pxstr + ' does not match pattern ' + apat - alpha = match.group(1) - assert alpha != 'ff' and alpha != '00', \ - 'unexpected full transparent/opaque pixel: ' + alpha + if rescale: + lbl += ' Sc' + if clip: + lbl += ' Cl' + ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table='(select * from "River" order by rid) foo', + use_overviews=1 if overview else 0, + prescale_rasters=rescale, clip_rasters=clip) + fs = iter(ds) + feature = next(fs) + assert feature['rid'] == 1 + lyr = mapnik.Layer('rgba_8bui') + lyr.datasource = ds + expenv = mapnik.Box2d(0, -210, 256, 0) + env = lyr.envelope() + # As the input size is a prime number both horizontally + # and vertically, we expect the extent of the overview + # tables to be a pixel wider than the original, whereas + # the pixel size in geographical units depends on the + # overview factor. So we start with the original pixel size + # as base scale and multiply by the overview factor. 
+ # NOTE: the overview table extent only grows north and east + pixsize = 1 # see gdalinfo river.tif + tol = pixsize * max(overview.split(',')) if overview else 0 + assert env.minx == expenv.minx + assert env.miny == expenv.miny + assert env.maxx == expenv.maxx + assert env.maxy == expenv.maxy + mm = mapnik.Map(256, 256) + style = mapnik.Style() + sym = mapnik.RasterSymbolizer() + rule = mapnik.Rule() + rule.symbolizers.append(sym) + style.rules.append(rule) + mm.append_style('foo', style) + lyr.styles.append('foo') + mm.layers.append(lyr) + mm.zoom_to_box(expenv) + im = mapnik.Image(mm.width, mm.height) + t0 = time.time() # we want wall time to include IO waits + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:full') + expected = './images/support/pgraster/%s-%s-%s-%s-box1.png' % ( + lyr.name, lbl, overview, clip) + compare_images(expected, im) + # no data + assert hexlify(im.view(3, 3, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(250, 250, 1, 1).to_string()) == b'00000000' + # full opaque river color + assert hexlify(im.view(175, 118, 1, 1).to_string()) == b'b9d8f8ff' + # half-transparent pixel + pxstr = hexlify(im.view(122, 138, 1, 1).to_string()).decode() + apat = ".*(..)$" + match = re.match(apat, pxstr) + assert match, 'pixel ' + pxstr + ' does not match pattern ' + apat + alpha = match.group(1) + assert alpha != 'ff' and alpha != '00', \ + 'unexpected full transparent/opaque pixel: ' + alpha + + # Now zoom over a portion of the env (1/10) + newenv = mapnik.Box2d(166, -105, 191, -77) + mm.zoom_to_box(newenv) + t0 = time.time() # we want wall time to include IO waits + im = mapnik.Image(mm.width, mm.height) + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10') + expected = './images/support/pgraster/%s-%s-%s-%s-box2.png' % ( + lyr.name, lbl, overview, clip) + compare_images(expected, im) + # no data + assert hexlify(im.view(255, 255, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(200, 40, 1, 1).to_string()) == b'00000000' + # full opaque river color + assert hexlify(im.view(100, 168, 1, 1).to_string()) == b'b9d8f8ff' + # half-transparent pixel + pxstr = hexlify(im.view(122, 138, 1, 1).to_string()).decode() + apat = ".*(..)$" + match = re.match(apat, pxstr) + assert match, 'pixel ' + pxstr + ' does not match pattern ' + apat + alpha = match.group(1) + assert alpha != 'ff' and alpha != '00', \ + 'unexpected full transparent/opaque pixel: ' + alpha def _test_rgba_8bui(lbl, tilesize, constraint, overview): - import_raster('../data/raster/river.tiff', 'River', tilesize, constraint, overview) - if constraint: - lbl += ' C' - if tilesize: - lbl += ' T:' + tilesize - if overview: - lbl += ' O:' + overview - for prescale in [0,1]: - for clip in [0,1]: - _test_rgba_8bui_rendering(lbl, overview, prescale, clip) - drop_imported('River', overview) + import_raster( + os.path.join(os.path.dirname(os.path.realpath(__file__)),'../../test/data/raster/river.tiff'), + 'River', + tilesize, + constraint, + overview) + if constraint: + lbl += ' C' + if tilesize: + lbl += ' T:' + tilesize + if overview: + lbl += ' O:' + overview + for prescale in [0, 1]: + for clip in [0, 1]: + _test_rgba_8bui_rendering(lbl, overview, prescale, clip) + drop_imported('River', overview) def test_rgba_8bui(): - for tilesize in ['','16x16']: - for constraint in [0,1]: - for overview in ['2']: - _test_rgba_8bui('rgba_8bui', tilesize, constraint, overview) + for tilesize in ['', '16x16']: + for constraint in [0, 1]: + 
for overview in ['2']: + _test_rgba_8bui( + 'rgba_8bui', tilesize, constraint, overview) - # nodata-edge.tif, RGB 8BUI + # # nodata-edge.tif, RGB 8BUI def _test_rgb_8bui_rendering(lbl, tnam, overview, rescale, clip): - if rescale: - lbl += ' Sc' - if clip: - lbl += ' Cl' - ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME,table=tnam, - use_overviews=1 if overview else 0, - prescale_rasters=rescale,clip_rasters=clip) - fs = ds.featureset() - feature = fs.next() - eq_(feature['rid'],1) - lyr = mapnik.Layer('rgba_8bui') - lyr.datasource = ds - expenv = mapnik.Box2d(-12329035.7652168,4508650.39854396, \ - -12328653.0279471,4508957.34625536) - env = lyr.envelope() - # As the input size is a prime number both horizontally - # and vertically, we expect the extent of the overview - # tables to be a pixel wider than the original, whereas - # the pixel size in geographical units depends on the - # overview factor. So we start with the original pixel size - # as base scale and multiply by the overview factor. - # NOTE: the overview table extent only grows north and east - pixsize = 2 # see gdalinfo nodata-edge.tif - tol = pixsize * max(overview.split(',')) if overview else 0 - assert_almost_equal(env.minx, expenv.minx, places=0) - assert_almost_equal(env.miny, expenv.miny, delta=tol) - assert_almost_equal(env.maxx, expenv.maxx, delta=tol) - assert_almost_equal(env.maxy, expenv.maxy, places=0) - mm = mapnik.Map(256, 256) - style = mapnik.Style() - sym = mapnik.RasterSymbolizer() - rule = mapnik.Rule() - rule.symbols.append(sym) - style.rules.append(rule) - mm.append_style('foo', style) - lyr.styles.append('foo') - mm.layers.append(lyr) - mm.zoom_to_box(expenv) - im = mapnik.Image(mm.width, mm.height) - t0 = time.time() # we want wall time to include IO waits - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:full') - expected = 'images/support/pgraster/%s-%s-%s-%s-%s-box1.png' % (lyr.name,tnam,lbl,overview,clip) - compare_images(expected,im) - # no data - eq_(hexlify(im.view(3,16,1,1).tostring()), '00000000') - eq_(hexlify(im.view(128,16,1,1).tostring()), '00000000') - eq_(hexlify(im.view(250,16,1,1).tostring()), '00000000') - eq_(hexlify(im.view(3,240,1,1).tostring()), '00000000') - eq_(hexlify(im.view(128,240,1,1).tostring()), '00000000') - eq_(hexlify(im.view(250,240,1,1).tostring()), '00000000') - # dark brown - eq_(hexlify(im.view(174,39,1,1).tostring()), 'c3a698ff') - # dark gray - eq_(hexlify(im.view(195,132,1,1).tostring()), '575f62ff') - # Now zoom over a portion of the env (1/10) - newenv = mapnik.Box2d(-12329035.7652168, 4508926.651484220, \ - -12328997.49148983,4508957.34625536) - mm.zoom_to_box(newenv) - t0 = time.time() # we want wall time to include IO waits - im = mapnik.Image(mm.width, mm.height) - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10') - expected = 'images/support/pgraster/%s-%s-%s-%s-%s-box2.png' % (lyr.name,tnam,lbl,overview,clip) - compare_images(expected,im) - # no data - eq_(hexlify(im.view(3,16,1,1).tostring()), '00000000') - eq_(hexlify(im.view(128,16,1,1).tostring()), '00000000') - eq_(hexlify(im.view(250,16,1,1).tostring()), '00000000') - # black - eq_(hexlify(im.view(3,42,1,1).tostring()), '000000ff') - eq_(hexlify(im.view(3,134,1,1).tostring()), '000000ff') - eq_(hexlify(im.view(3,244,1,1).tostring()), '000000ff') - # gray - eq_(hexlify(im.view(135,157,1,1).tostring()), '4e555bff') - # brown - eq_(hexlify(im.view(195,223,1,1).tostring()), 'f2cdbaff') + if rescale: + lbl += ' Sc' 
+ if clip: + lbl += ' Cl' + ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=f'(select * from "{tnam}" order by rid) foo', + use_overviews=1 if overview else 0, + prescale_rasters=rescale, clip_rasters=clip) + fs = iter(ds) + feature = next(fs) + assert feature['rid'] == 1 + lyr = mapnik.Layer('rgba_8bui') + lyr.datasource = ds + expenv = mapnik.Box2d(-12329035.765216826, 4508650.398543958, + -12328653.027947055, 4508957.346255356) + env = lyr.envelope() + # As the input size is a prime number both horizontally + # and vertically, we expect the extent of the overview + # tables to be a pixel wider than the original, whereas + # the pixel size in geographical units depends on the + # overview factor. So we start with the original pixel size + # as base scale and multiply by the overview factor. + # NOTE: the overview table extent only grows north and east + pixsize = 2 # see gdalinfo nodata-edge.tif + tol = pixsize * max(overview.split(',')) if overview else 0 + assert env.minx == expenv.minx + assert env.miny == expenv.miny + assert env.maxx == expenv.maxx + assert env.maxy == expenv.maxy + mm = mapnik.Map(256, 256) + style = mapnik.Style() + sym = mapnik.RasterSymbolizer() + rule = mapnik.Rule() + rule.symbolizers.append(sym) + style.rules.append(rule) + mm.append_style('foo', style) + lyr.styles.append('foo') + mm.layers.append(lyr) + mm.zoom_to_box(expenv) + im = mapnik.Image(mm.width, mm.height) + t0 = time.time() # we want wall time to include IO waits + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:full') + expected = './images/support/pgraster/%s-%s-%s-%s-%s-box1.png' % ( + lyr.name, tnam, lbl, overview, clip) + compare_images(expected, im) + # no data + assert hexlify(im.view(3, 16, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(128, 16, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(250, 16, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(3, 240, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(128, 240, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(250, 240, 1, 1).to_string()) == b'00000000' + # dark brown + assert hexlify(im.view(174, 39, 1, 1).to_string()) == b'c3a698ff' + # dark gray + assert hexlify(im.view(195, 132, 1, 1).to_string()) == b'575f62ff' + # Now zoom over a portion of the env (1/10) + newenv = mapnik.Box2d(-12329035.7652168, 4508926.651484220, + -12328997.49148983, 4508957.34625536) + mm.zoom_to_box(newenv) + t0 = time.time() # we want wall time to include IO waits + im = mapnik.Image(mm.width, mm.height) + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10') + expected = 'images/support/pgraster/%s-%s-%s-%s-%s-box2.png' % ( + lyr.name, tnam, lbl, overview, clip) + compare_images(expected, im) + # no data + assert hexlify(im.view(3, 16, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(128, 16, 1, 1).to_string()) == b'00000000' + assert hexlify(im.view(250, 16, 1, 1).to_string()) == b'00000000' + # black + assert hexlify(im.view(3, 42, 1, 1).to_string()) == b'000000ff' + assert hexlify(im.view(3, 134, 1, 1).to_string()) == b'000000ff' + assert hexlify(im.view(3, 244, 1, 1).to_string()) == b'000000ff' + # gray + assert hexlify(im.view(135, 157, 1, 1).to_string()) == b'4e555bff' + # brown + assert hexlify(im.view(195, 223, 1, 1).to_string()) == b'f2cdbaff' def _test_rgb_8bui(lbl, tilesize, constraint, overview): - tnam = 'nodataedge' - import_raster('../data/raster/nodata-edge.tif', tnam, tilesize, 
constraint, overview) - if constraint: - lbl += ' C' - if tilesize: - lbl += ' T:' + tilesize - if overview: - lbl += ' O:' + overview - for prescale in [0,1]: - for clip in [0,1]: - _test_rgb_8bui_rendering(lbl, tnam, overview, prescale, clip) - #drop_imported(tnam, overview) + tnam = 'nodataedge' + import_raster( + os.path.join(os.path.dirname(os.path.realpath(__file__)),'../../test/data/raster/nodata-edge.tif'), + tnam, + tilesize, + constraint, + overview) + if constraint: + lbl += ' C' + if tilesize: + lbl += ' T:' + tilesize + if overview: + lbl += ' O:' + overview + for prescale in [0, 1]: + for clip in [0, 1]: + _test_rgb_8bui_rendering(lbl, tnam, overview, prescale, clip) + drop_imported(tnam, overview) def test_rgb_8bui(): - for tilesize in ['64x64']: - for constraint in [1]: - for overview in ['']: - _test_rgb_8bui('rgb_8bui', tilesize, constraint, overview) - - def _test_grayscale_subquery(lbl,pixtype,value): - # - # 3 8 13 - # +---+---+---+ - # 3 | v | v | v | NOTE: writes different color - # +---+---+---+ in 13,8 and 8,13 - # 8 | v | v | a | - # +---+---+---+ - # 13 | v | b | v | - # +---+---+---+ - # - val_a = value/3; - val_b = val_a*2; - sql = "(select 3 as i, " \ - " ST_SetValues(" \ - " ST_SetValues(" \ - " ST_AsRaster(" \ - " ST_MakeEnvelope(0,0,14,14), " \ - " 1.0, -1.0, '%s', %s" \ - " ), " \ - " 11, 6, 4, 5, %s::float8" \ - " )," \ - " 6, 11, 5, 4, %s::float8" \ - " ) as \"R\"" \ - ") as foo" % (pixtype,value, val_a, val_b) - rescale = 0 - clip = 0 - if rescale: - lbl += ' Sc' - if clip: - lbl += ' Cl' - ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql, - raster_field='"R"', use_overviews=1, - prescale_rasters=rescale,clip_rasters=clip) - fs = ds.featureset() - feature = fs.next() - eq_(feature['i'],3) - lyr = mapnik.Layer('grayscale_subquery') - lyr.datasource = ds - expenv = mapnik.Box2d(0,0,14,14) - env = lyr.envelope() - assert_almost_equal(env.minx, expenv.minx, places=0) - assert_almost_equal(env.miny, expenv.miny, places=0) - assert_almost_equal(env.maxx, expenv.maxx, places=0) - assert_almost_equal(env.maxy, expenv.maxy, places=0) - mm = mapnik.Map(15, 15) - style = mapnik.Style() - sym = mapnik.RasterSymbolizer() - rule = mapnik.Rule() - rule.symbols.append(sym) - style.rules.append(rule) - mm.append_style('foo', style) - lyr.styles.append('foo') - mm.layers.append(lyr) - mm.zoom_to_box(expenv) - im = mapnik.Image(mm.width, mm.height) - t0 = time.time() # we want wall time to include IO waits - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:full') - expected = 'images/support/pgraster/%s-%s-%s-%s.png' % (lyr.name,lbl,pixtype,value) - compare_images(expected,im) - h = format(value, '02x') - hex_v = h+h+h+'ff' - h = format(val_a, '02x') - hex_a = h+h+h+'ff' - h = format(val_b, '02x') - hex_b = h+h+h+'ff' - eq_(hexlify(im.view( 3, 3,1,1).tostring()), hex_v); - eq_(hexlify(im.view( 8, 3,1,1).tostring()), hex_v); - eq_(hexlify(im.view(13, 3,1,1).tostring()), hex_v); - eq_(hexlify(im.view( 3, 8,1,1).tostring()), hex_v); - eq_(hexlify(im.view( 8, 8,1,1).tostring()), hex_v); - eq_(hexlify(im.view(13, 8,1,1).tostring()), hex_a); - eq_(hexlify(im.view( 3,13,1,1).tostring()), hex_v); - eq_(hexlify(im.view( 8,13,1,1).tostring()), hex_b); - eq_(hexlify(im.view(13,13,1,1).tostring()), hex_v); + for tilesize in ['64x64']: + for constraint in [1]: + for overview in ['']: + _test_rgb_8bui('rgb_8bui', tilesize, constraint, overview) + + def _test_grayscale_subquery(lbl, pixtype, value): + # + # 3 8 13 + # +---+---+---+ + # 3 
| v | v | v | NOTE: writes different color + # +---+---+---+ in 13,8 and 8,13 + # 8 | v | v | a | + # +---+---+---+ + # 13 | v | b | v | + # +---+---+---+ + # + val_a = int(value / 3) + val_b = val_a * 2 + sql = "(select 3 as i, " \ + " ST_SetValues(" \ + " ST_SetValues(" \ + " ST_AsRaster(" \ + " ST_MakeEnvelope(0,0,14,14), " \ + " 1.0, -1.0, '%s', %s" \ + " ), " \ + " 11, 6, 4, 5, %s::float8" \ + " )," \ + " 6, 11, 5, 4, %s::float8" \ + " ) as \"R\"" \ + ") as foo" % (pixtype, value, val_a, val_b) + rescale = 0 + clip = 0 + if rescale: + lbl += ' Sc' + if clip: + lbl += ' Cl' + ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql, + raster_field='"R"', use_overviews=1, + prescale_rasters=rescale, clip_rasters=clip) + fs = iter(ds) + feature = next(fs) + assert feature['i'] == 3 + lyr = mapnik.Layer('grayscale_subquery') + lyr.datasource = ds + expenv = mapnik.Box2d(0, 0, 14, 14) + env = lyr.envelope() + assert env.minx == expenv.minx + assert env.miny == expenv.miny + assert env.maxx == expenv.maxx + assert env.maxy == expenv.maxy + mm = mapnik.Map(15, 15) + style = mapnik.Style() + sym = mapnik.RasterSymbolizer() + rule = mapnik.Rule() + rule.symbolizers.append(sym) + style.rules.append(rule) + mm.append_style('foo', style) + lyr.styles.append('foo') + mm.layers.append(lyr) + mm.zoom_to_box(expenv) + im = mapnik.Image(mm.width, mm.height) + t0 = time.time() # we want wall time to include IO waits + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:full') + expected = 'images/support/pgraster/%s-%s-%s-%s.png' % ( + lyr.name, lbl, pixtype, value) + compare_images(expected, im) + h = format(value, '02x') + hex_v = h + h + h + 'ff' + hex_v = hex_v.encode() + h = format(val_a, '02x') + hex_a = h + h + h + 'ff' + hex_a = hex_a.encode() + h = format(val_b, '02x') + hex_b = h + h + h + 'ff' + hex_b = hex_b.encode() + assert hexlify(im.view(3, 3, 1, 1).to_string()) == hex_v + assert hexlify(im.view(8, 3, 1, 1).to_string()) == hex_v + assert hexlify(im.view(13, 3, 1, 1).to_string()) == hex_v + assert hexlify(im.view(3, 8, 1, 1).to_string()) == hex_v + assert hexlify(im.view(8, 8, 1, 1).to_string()) == hex_v + assert hexlify(im.view(13, 8, 1, 1).to_string()) == hex_a + assert hexlify(im.view(3, 13, 1, 1).to_string()) == hex_v + assert hexlify(im.view(8, 13, 1, 1).to_string()) == hex_b + assert hexlify(im.view(13, 13, 1, 1).to_string()) == hex_v def test_grayscale_2bui_subquery(): - _test_grayscale_subquery('grayscale_2bui_subquery', '2BUI', 3) + _test_grayscale_subquery('grayscale_2bui_subquery', '2BUI', 3) def test_grayscale_4bui_subquery(): - _test_grayscale_subquery('grayscale_4bui_subquery', '4BUI', 15) + _test_grayscale_subquery('grayscale_4bui_subquery', '4BUI', 15) def test_grayscale_8bui_subquery(): - _test_grayscale_subquery('grayscale_8bui_subquery', '8BUI', 63) + _test_grayscale_subquery('grayscale_8bui_subquery', '8BUI', 63) def test_grayscale_8bsi_subquery(): - # NOTE: we're using a positive integer because Mapnik - # does not support negative data values anyway - _test_grayscale_subquery('grayscale_8bsi_subquery', '8BSI', 69) + # NOTE: we're using a positive integer because Mapnik + # does not support negative data values anyway + _test_grayscale_subquery('grayscale_8bsi_subquery', '8BSI', 69) def test_grayscale_16bui_subquery(): - _test_grayscale_subquery('grayscale_16bui_subquery', '16BUI', 126) + _test_grayscale_subquery('grayscale_16bui_subquery', '16BUI', 126) def test_grayscale_16bsi_subquery(): - # NOTE: we're using a positive 
integer because Mapnik - # does not support negative data values anyway - _test_grayscale_subquery('grayscale_16bsi_subquery', '16BSI', 144) + # NOTE: we're using a positive integer because Mapnik + # does not support negative data values anyway + _test_grayscale_subquery('grayscale_16bsi_subquery', '16BSI', 144) def test_grayscale_32bui_subquery(): - _test_grayscale_subquery('grayscale_32bui_subquery', '32BUI', 255) + _test_grayscale_subquery('grayscale_32bui_subquery', '32BUI', 255) def test_grayscale_32bsi_subquery(): - # NOTE: we're using a positive integer because Mapnik - # does not support negative data values anyway - _test_grayscale_subquery('grayscale_32bsi_subquery', '32BSI', 129) + # NOTE: we're using a positive integer because Mapnik + # does not support negative data values anyway + _test_grayscale_subquery('grayscale_32bsi_subquery', '32BSI', 129) def _test_data_subquery(lbl, pixtype, value): - # - # 3 8 13 - # +---+---+---+ - # 3 | v | v | v | NOTE: writes different values - # +---+---+---+ in 13,8 and 8,13 - # 8 | v | v | a | - # +---+---+---+ - # 13 | v | b | v | - # +---+---+---+ - # - val_a = value/3; - val_b = val_a*2; - sql = "(select 3 as i, " \ - " ST_SetValues(" \ - " ST_SetValues(" \ - " ST_AsRaster(" \ - " ST_MakeEnvelope(0,0,14,14), " \ - " 1.0, -1.0, '%s', %s" \ - " ), " \ - " 11, 6, 5, 5, %s::float8" \ - " )," \ - " 6, 11, 5, 5, %s::float8" \ - " ) as \"R\"" \ - ") as foo" % (pixtype,value, val_a, val_b) - overview = '' - rescale = 0 - clip = 0 - if rescale: - lbl += ' Sc' - if clip: - lbl += ' Cl' - ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql, - raster_field='R', use_overviews=0 if overview else 0, - band=1, prescale_rasters=rescale, clip_rasters=clip) - fs = ds.featureset() - feature = fs.next() - eq_(feature['i'],3) - lyr = mapnik.Layer('data_subquery') - lyr.datasource = ds - expenv = mapnik.Box2d(0,0,14,14) - env = lyr.envelope() - assert_almost_equal(env.minx, expenv.minx, places=0) - assert_almost_equal(env.miny, expenv.miny, places=0) - assert_almost_equal(env.maxx, expenv.maxx, places=0) - assert_almost_equal(env.maxy, expenv.maxy, places=0) - mm = mapnik.Map(15, 15) - style = mapnik.Style() - col = mapnik.RasterColorizer(); - col.default_mode = mapnik.COLORIZER_DISCRETE; - col.add_stop(val_a, mapnik.Color(0xff,0x00,0x00,255)); - col.add_stop(val_b, mapnik.Color(0x00,0xff,0x00,255)); - col.add_stop(value, mapnik.Color(0x00,0x00,0xff,255)); - sym = mapnik.RasterSymbolizer() - sym.colorizer = col - rule = mapnik.Rule() - rule.symbols.append(sym) - style.rules.append(rule) - mm.append_style('foo', style) - lyr.styles.append('foo') - mm.layers.append(lyr) - mm.zoom_to_box(expenv) - im = mapnik.Image(mm.width, mm.height) - t0 = time.time() # we want wall time to include IO waits - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:full') - expected = 'images/support/pgraster/%s-%s-%s-%s.png' % (lyr.name,lbl,pixtype,value) - compare_images(expected,im) + # + # 3 8 13 + # +---+---+---+ + # 3 | v | v | v | NOTE: writes different values + # +---+---+---+ in 13,8 and 8,13 + # 8 | v | v | a | + # +---+---+---+ + # 13 | v | b | v | + # +---+---+---+ + # + val_a = value / 3 + val_b = val_a * 2 + sql = "(select 3 as i, " \ + " ST_SetValues(" \ + " ST_SetValues(" \ + " ST_AsRaster(" \ + " ST_MakeEnvelope(0,0,14,14), " \ + " 1.0, -1.0, '%s', %s" \ + " ), " \ + " 11, 6, 5, 5, %s::float8" \ + " )," \ + " 6, 11, 5, 5, %s::float8" \ + " ) as \"R\"" \ + ") as foo" % (pixtype, value, val_a, val_b) + overview = '' + 
rescale = 0 + clip = 0 + if rescale: + lbl += ' Sc' + if clip: + lbl += ' Cl' + ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql, + raster_field='R', use_overviews=0 if overview else 0, + band=1, prescale_rasters=rescale, clip_rasters=clip) + fs = iter(ds) + feature = next(fs) + assert feature['i'] == 3 + lyr = mapnik.Layer('data_subquery') + lyr.datasource = ds + expenv = mapnik.Box2d(0, 0, 14, 14) + env = lyr.envelope() + assert env.minx == expenv.minx#, places=0) + assert env.miny == expenv.miny#, places=0) + assert env.maxx == expenv.maxx#, places=0) + assert env.maxy == expenv.maxy#, places=0) + mm = mapnik.Map(15, 15) + style = mapnik.Style() + col = mapnik.RasterColorizer() + col.default_mode = mapnik.COLORIZER_DISCRETE + col.add_stop(val_a, mapnik.Color(0xff, 0x00, 0x00, 255)) + col.add_stop(val_b, mapnik.Color(0x00, 0xff, 0x00, 255)) + col.add_stop(value, mapnik.Color(0x00, 0x00, 0xff, 255)) + sym = mapnik.RasterSymbolizer() + sym.colorizer = col + rule = mapnik.Rule() + rule.symbolizers.append(sym) + style.rules.append(rule) + mm.append_style('foo', style) + lyr.styles.append('foo') + mm.layers.append(lyr) + mm.zoom_to_box(expenv) + im = mapnik.Image(mm.width, mm.height) + t0 = time.time() # we want wall time to include IO waits + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:full') + expected = 'images/support/pgraster/%s-%s-%s-%s.png' % ( + lyr.name, lbl, pixtype, value) + compare_images(expected, im) def test_data_2bui_subquery(): - _test_data_subquery('data_2bui_subquery', '2BUI', 3) + _test_data_subquery('data_2bui_subquery', '2BUI', 3) def test_data_4bui_subquery(): - _test_data_subquery('data_4bui_subquery', '4BUI', 15) + _test_data_subquery('data_4bui_subquery', '4BUI', 15) def test_data_8bui_subquery(): - _test_data_subquery('data_8bui_subquery', '8BUI', 63) + _test_data_subquery('data_8bui_subquery', '8BUI', 63) def test_data_8bsi_subquery(): - # NOTE: we're using a positive integer because Mapnik - # does not support negative data values anyway - _test_data_subquery('data_8bsi_subquery', '8BSI', 69) + # NOTE: we're using a positive integer because Mapnik + # does not support negative data values anyway + _test_data_subquery('data_8bsi_subquery', '8BSI', 69) def test_data_16bui_subquery(): - _test_data_subquery('data_16bui_subquery', '16BUI', 126) + _test_data_subquery('data_16bui_subquery', '16BUI', 126) def test_data_16bsi_subquery(): - # NOTE: we're using a positive integer because Mapnik - # does not support negative data values anyway - _test_data_subquery('data_16bsi_subquery', '16BSI', 135) + # NOTE: we're using a positive integer because Mapnik + # does not support negative data values anyway + _test_data_subquery('data_16bsi_subquery', '16BSI', 135) def test_data_32bui_subquery(): - _test_data_subquery('data_32bui_subquery', '32BUI', 255) + _test_data_subquery('data_32bui_subquery', '32BUI', 255) def test_data_32bsi_subquery(): - # NOTE: we're using a positive integer because Mapnik - # does not support negative data values anyway - _test_data_subquery('data_32bsi_subquery', '32BSI', 264) + # NOTE: we're using a positive integer because Mapnik + # does not support negative data values anyway + _test_data_subquery('data_32bsi_subquery', '32BSI', 264) def test_data_32bf_subquery(): - _test_data_subquery('data_32bf_subquery', '32BF', 450) + _test_data_subquery('data_32bf_subquery', '32BF', 450) def test_data_64bf_subquery(): - _test_data_subquery('data_64bf_subquery', '64BF', 3072) + 
_test_data_subquery('data_64bf_subquery', '64BF', 3072) def _test_rgba_subquery(lbl, pixtype, r, g, b, a, g1, b1): - # - # 3 8 13 - # +---+---+---+ - # 3 | v | v | h | NOTE: writes different alpha - # +---+---+---+ in 13,8 and 8,13 - # 8 | v | v | a | - # +---+---+---+ - # 13 | v | b | v | - # +---+---+---+ - # - sql = "(select 3 as i, " \ - " ST_SetValues(" \ - " ST_SetValues(" \ - " ST_AddBand(" \ - " ST_AddBand(" \ - " ST_AddBand(" \ - " ST_AsRaster(" \ - " ST_MakeEnvelope(0,0,14,14), " \ - " 1.0, -1.0, '%s', %s" \ - " )," \ - " '%s', %d::float" \ - " ), " \ - " '%s', %d::float" \ - " ), " \ - " '%s', %d::float" \ - " ), " \ - " 2, 11, 6, 4, 5, %s::float8" \ - " )," \ - " 3, 6, 11, 5, 4, %s::float8" \ - " ) as r" \ - ") as foo" % (pixtype, r, pixtype, g, pixtype, b, pixtype, a, g1, b1) - overview = '' - rescale = 0 - clip = 0 - if rescale: - lbl += ' Sc' - if clip: - lbl += ' Cl' - ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql, - raster_field='r', use_overviews=0 if overview else 0, - prescale_rasters=rescale, clip_rasters=clip) - fs = ds.featureset() - feature = fs.next() - eq_(feature['i'],3) - lyr = mapnik.Layer('rgba_subquery') - lyr.datasource = ds - expenv = mapnik.Box2d(0,0,14,14) - env = lyr.envelope() - assert_almost_equal(env.minx, expenv.minx, places=0) - assert_almost_equal(env.miny, expenv.miny, places=0) - assert_almost_equal(env.maxx, expenv.maxx, places=0) - assert_almost_equal(env.maxy, expenv.maxy, places=0) - mm = mapnik.Map(15, 15) - style = mapnik.Style() - sym = mapnik.RasterSymbolizer() - rule = mapnik.Rule() - rule.symbols.append(sym) - style.rules.append(rule) - mm.append_style('foo', style) - lyr.styles.append('foo') - mm.layers.append(lyr) - mm.zoom_to_box(expenv) - im = mapnik.Image(mm.width, mm.height) - t0 = time.time() # we want wall time to include IO waits - mapnik.render(mm, im) - lap = time.time() - t0 - log('T ' + str(lap) + ' -- ' + lbl + ' E:full') - expected = 'images/support/pgraster/%s-%s-%s-%s-%s-%s-%s-%s-%s.png' % (lyr.name,lbl, pixtype, r, g, b, a, g1, b1) - compare_images(expected,im) - hex_v = format(r << 24 | g << 16 | b << 8 | a, '08x') - hex_a = format(r << 24 | g1 << 16 | b << 8 | a, '08x') - hex_b = format(r << 24 | g << 16 | b1 << 8 | a, '08x') - eq_(hexlify(im.view( 3, 3,1,1).tostring()), hex_v); - eq_(hexlify(im.view( 8, 3,1,1).tostring()), hex_v); - eq_(hexlify(im.view(13, 3,1,1).tostring()), hex_v); - eq_(hexlify(im.view( 3, 8,1,1).tostring()), hex_v); - eq_(hexlify(im.view( 8, 8,1,1).tostring()), hex_v); - eq_(hexlify(im.view(13, 8,1,1).tostring()), hex_a); - eq_(hexlify(im.view( 3,13,1,1).tostring()), hex_v); - eq_(hexlify(im.view( 8,13,1,1).tostring()), hex_b); - eq_(hexlify(im.view(13,13,1,1).tostring()), hex_v); + # + # 3 8 13 + # +---+---+---+ + # 3 | v | v | h | NOTE: writes different alpha + # +---+---+---+ in 13,8 and 8,13 + # 8 | v | v | a | + # +---+---+---+ + # 13 | v | b | v | + # +---+---+---+ + # + sql = "(select 3 as i, " \ + " ST_SetValues(" \ + " ST_SetValues(" \ + " ST_AddBand(" \ + " ST_AddBand(" \ + " ST_AddBand(" \ + " ST_AsRaster(" \ + " ST_MakeEnvelope(0,0,14,14), " \ + " 1.0, -1.0, '%s', %s" \ + " )," \ + " '%s', %d::float" \ + " ), " \ + " '%s', %d::float" \ + " ), " \ + " '%s', %d::float" \ + " ), " \ + " 2, 11, 6, 4, 5, %s::float8" \ + " )," \ + " 3, 6, 11, 5, 4, %s::float8" \ + " ) as r" \ + ") as foo" % ( + pixtype, + r, + pixtype, + g, + pixtype, + b, + pixtype, + a, + g1, + b1) + overview = '' + rescale = 0 + clip = 0 + if rescale: + lbl += ' Sc' + if clip: + lbl += ' Cl' + ds = 
mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql, + raster_field='r', use_overviews=0 if overview else 0, + prescale_rasters=rescale, clip_rasters=clip) + fs = iter(ds) + feature = next(fs) + assert feature['i'] == 3 + lyr = mapnik.Layer('rgba_subquery') + lyr.datasource = ds + expenv = mapnik.Box2d(0, 0, 14, 14) + env = lyr.envelope() + assert env.minx == expenv.minx#, places=0) + assert env.miny == expenv.miny#, places=0) + assert env.maxx == expenv.maxx#, places=0) + assert env.maxy == expenv.maxy#, places=0) + mm = mapnik.Map(15, 15) + style = mapnik.Style() + sym = mapnik.RasterSymbolizer() + rule = mapnik.Rule() + rule.symbolizers.append(sym) + style.rules.append(rule) + mm.append_style('foo', style) + lyr.styles.append('foo') + mm.layers.append(lyr) + mm.zoom_to_box(expenv) + im = mapnik.Image(mm.width, mm.height) + t0 = time.time() # we want wall time to include IO waits + mapnik.render(mm, im) + lap = time.time() - t0 + log('T ' + str(lap) + ' -- ' + lbl + ' E:full') + expected = 'images/support/pgraster/%s-%s-%s-%s-%s-%s-%s-%s-%s.png' % ( + lyr.name, lbl, pixtype, r, g, b, a, g1, b1) + compare_images(expected, im) + hex_v = format(r << 24 | g << 16 | b << 8 | a, '08x').encode() + hex_a = format(r << 24 | g1<< 16 | b << 8 | a, '08x').encode() + hex_b = format(r << 24 | g << 16 | b1 << 8 | a, '08x').encode() + assert hexlify(im.view(3, 3, 1, 1).to_string()) == hex_v + assert hexlify(im.view(8, 3, 1, 1).to_string()) == hex_v + assert hexlify(im.view(13, 3, 1, 1).to_string()) == hex_v + assert hexlify(im.view(3, 8, 1, 1).to_string()) == hex_v + assert hexlify(im.view(8, 8, 1, 1).to_string()) == hex_v + assert hexlify(im.view(13, 8, 1, 1).to_string()) == hex_a + assert hexlify(im.view(3, 13, 1, 1).to_string()) == hex_v + assert hexlify(im.view(8, 13, 1, 1).to_string()) == hex_b + assert hexlify(im.view(13, 13, 1, 1).to_string()) == hex_v def test_rgba_8bui_subquery(): - _test_rgba_subquery('rgba_8bui_subquery', '8BUI', 255, 0, 0, 255, 255, 255) + _test_rgba_subquery( + 'rgba_8bui_subquery', + '8BUI', + 255, + 0, + 0, + 255, + 255, + 255) #def test_rgba_16bui_subquery(): - # _test_rgba_subquery('rgba_16bui_subquery', '16BUI', 65535, 0, 0, 65535, 65535, 65535) + # _test_rgba_subquery('rgba_16bui_subquery', '16BUI', 65535, 0, 0, 65535, 65535, 65535) #def test_rgba_32bui_subquery(): - # _test_rgba_subquery('rgba_32bui_subquery', '32BUI') + # _test_rgba_subquery('rgba_32bui_subquery', '32BUI') atexit.register(postgis_takedown) def enabled(tname): - enabled = len(sys.argv) < 2 or tname in sys.argv - if not enabled: - print "Skipping " + tname + " as not explicitly enabled" - return enabled - -if __name__ == "__main__": - setup() - fail = run_all(eval(x) for x in dir() if x.startswith("test_") and enabled(x)) - exit(fail) + enabled = len(sys.argv) < 2 or tname in sys.argv + if not enabled: + print("Skipping " + tname + " as not explicitly enabled") + return enabled diff --git a/test/python_tests/pickling_test.py b/test/python_tests/pickling_test.py index 7a3572d11..4430f6cd9 100644 --- a/test/python_tests/pickling_test.py +++ b/test/python_tests/pickling_test.py @@ -1,44 +1,34 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - import os -from nose.tools import eq_ -from utilities import execution_path, run_all - -import mapnik, pickle +import pickle +import pytest +import mapnik +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() 
os.chdir(execution_path('.')) + yield + def test_color_pickle(): c = mapnik.Color('blue') - - eq_(pickle.loads(pickle.dumps(c)), c) - + assert pickle.loads(pickle.dumps(c)) == c c = mapnik.Color(0, 64, 128) - - eq_(pickle.loads(pickle.dumps(c)), c) - + assert pickle.loads(pickle.dumps(c)) == c c = mapnik.Color(0, 64, 128, 192) + assert pickle.loads(pickle.dumps(c)) == c - eq_(pickle.loads(pickle.dumps(c)), c) def test_envelope_pickle(): e = mapnik.Box2d(100, 100, 200, 200) + assert pickle.loads(pickle.dumps(e)) == e - eq_(pickle.loads(pickle.dumps(e)), e) - -def test_parameters_pickle(): - params = mapnik.Parameters() - params.append(mapnik.Parameter('oh',str('yeah'))) - - params2 = pickle.loads(pickle.dumps(params,pickle.HIGHEST_PROTOCOL)) - - eq_(params[0][0],params2[0][0]) - eq_(params[0][1],params2[0][1]) +def test_projection_pickle(): + p = mapnik.Projection("epsg:4326") + assert pickle.loads(pickle.dumps(p)).definition() == p.definition() -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) +def test_coord_pickle(): + c = mapnik.Coord(-1, 52) + assert pickle.loads(pickle.dumps(c)) == c diff --git a/test/python_tests/png_encoding_test.py b/test/python_tests/png_encoding_test.py index 568edfd78..ed91fed6c 100644 --- a/test/python_tests/png_encoding_test.py +++ b/test/python_tests/png_encoding_test.py @@ -1,103 +1,96 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import os, mapnik -from nose.tools import eq_ -from utilities import execution_path, run_all +import os +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if mapnik.has_png(): tmp_dir = '/tmp/mapnik-png/' if not os.path.exists(tmp_dir): - os.makedirs(tmp_dir) + os.makedirs(tmp_dir) opts = [ - 'png32', - 'png32:t=0', - 'png8:m=o', - 'png8:m=o:c=1', - 'png8:m=o:t=0', - 'png8:m=o:c=1:t=0', - 'png8:m=o:t=1', - 'png8:m=o:t=2', - 'png8:m=h', - 'png8:m=h:c=1', - 'png8:m=h:t=0', - 'png8:m=h:c=1:t=0', - 'png8:m=h:t=1', - 'png8:m=h:t=2', - 'png32:e=miniz', - 'png8:e=miniz' + 'png32', + 'png32:t=0', + 'png8:m=o', + 'png8:m=o:c=1', + 'png8:m=o:t=0', + 'png8:m=o:c=1:t=0', + 'png8:m=o:t=1', + 'png8:m=o:t=2', + 'png8:m=h', + 'png8:m=h:c=1', + 'png8:m=h:t=0', + 'png8:m=h:c=1:t=0', + 'png8:m=h:t=1', + 'png8:m=h:t=2' ] # Todo - use itertools.product #z_opts = range(1,9+1) #t_opts = range(0,2+1) - def gen_filepath(name,format): - return os.path.join('images/support/encoding-opts',name+'-'+format.replace(":","+")+'.png') + def gen_filepath(name, format): + return os.path.join('images/support/encoding-opts', + name + '-' + format.replace(":", "+") + '.png') generate = os.environ.get('UPDATE') - def test_expected_encodings(): + def test_expected_encodings(setup): # blank image - im = mapnik.Image(256,256) + im = mapnik.Image(256, 256) for opt in opts: - expected = gen_filepath('solid',opt) - actual = os.path.join(tmp_dir,os.path.basename(expected)) + expected = gen_filepath('solid', opt) + actual = os.path.join(tmp_dir, os.path.basename(expected)) if generate or not os.path.exists(expected): - print 'generating expected image %s' % expected - im.save(expected,opt) + print('generating expected image %s' % expected) + im.save(expected, opt) else: - im.save(actual,opt) - eq_(mapnik.Image.open(actual).tostring('png32'), - mapnik.Image.open(expected).tostring('png32'), - '%s (actual) not == to %s 
(expected)' % (actual,expected)) + im.save(actual, opt) + assert mapnik.Image.open(actual).to_string('png32') == mapnik.Image.open(expected).to_string('png32'), '%s (actual) not == to %s (expected)' % (actual, expected) # solid image im.fill(mapnik.Color('green')) for opt in opts: - expected = gen_filepath('blank',opt) - actual = os.path.join(tmp_dir,os.path.basename(expected)) + expected = gen_filepath('blank', opt) + actual = os.path.join(tmp_dir, os.path.basename(expected)) if generate or not os.path.exists(expected): - print 'generating expected image %s' % expected - im.save(expected,opt) + print('generating expected image %s' % expected) + im.save(expected, opt) else: - im.save(actual,opt) - eq_(mapnik.Image.open(actual).tostring('png32'), - mapnik.Image.open(expected).tostring('png32'), - '%s (actual) not == to %s (expected)' % (actual,expected)) + im.save(actual, opt) + assert mapnik.Image.open(actual).to_string('png32') == mapnik.Image.open(expected).to_string('png32'), '%s (actual) not == to %s (expected)' % (actual, expected) # aerial im = mapnik.Image.open('./images/support/transparency/aerial_rgba.png') for opt in opts: - expected = gen_filepath('aerial_rgba',opt) - actual = os.path.join(tmp_dir,os.path.basename(expected)) + expected = gen_filepath('aerial_rgba', opt) + actual = os.path.join(tmp_dir, os.path.basename(expected)) if generate or not os.path.exists(expected): - print 'generating expected image %s' % expected - im.save(expected,opt) + print('generating expected image %s' % expected) + im.save(expected, opt) else: - im.save(actual,opt) - eq_(mapnik.Image.open(actual).tostring('png32'), - mapnik.Image.open(expected).tostring('png32'), - '%s (actual) not == to %s (expected)' % (actual,expected)) + im.save(actual, opt) + assert mapnik.Image.open(actual).to_string('png32') == mapnik.Image.open(expected).to_string('png32'), '%s (actual) not == to %s (expected)' % (actual, expected) def test_transparency_levels(): # create partial transparency image - im = mapnik.Image(256,256) + im = mapnik.Image(256, 256) im.fill(mapnik.Color('rgba(255,255,255,.5)')) c2 = mapnik.Color('rgba(255,255,0,.2)') c3 = mapnik.Color('rgb(0,255,255)') - for y in range(0,im.height()/2): - for x in range(0,im.width()/2): - im.set_pixel(x,y,c2) - for y in range(im.height()/2,im.height()): - for x in range(im.width()/2,im.width()): - im.set_pixel(x,y,c3) + for y in range(0, int(im.height() / 2)): + for x in range(0, int(im.width() / 2)): + im.set_pixel(x, y, c2) + for y in range(int(im.height() / 2), im.height()): + for x in range(int(im.width() / 2), im.width()): + im.set_pixel(x, y, c3) t0 = tmp_dir + 'white0.png' t2 = tmp_dir + 'white2.png' @@ -105,93 +98,86 @@ def test_transparency_levels(): # octree format = 'png8:m=o:t=0' - im.save(t0,format) + im.save(t0, format) im_in = mapnik.Image.open(t0) - t0_len = len(im_in.tostring(format)) - eq_(t0_len,len(mapnik.Image.open('images/support/transparency/white0.png').tostring(format))) + t0_len = len(im_in.to_string(format)) + assert t0_len == len(mapnik.Image.open('images/support/transparency/white0.png').to_string(format)) format = 'png8:m=o:t=1' - im.save(t1,format) + im.save(t1, format) im_in = mapnik.Image.open(t1) - t1_len = len(im_in.tostring(format)) - eq_(len(im.tostring(format)),len(mapnik.Image.open('images/support/transparency/white1.png').tostring(format))) + t1_len = len(im_in.to_string(format)) + assert len(im.to_string(format)) == len(mapnik.Image.open('images/support/transparency/white1.png').to_string(format)) format = 'png8:m=o:t=2' - 
im.save(t2,format) + im.save(t2, format) im_in = mapnik.Image.open(t2) - t2_len = len(im_in.tostring(format)) - eq_(len(im.tostring(format)),len(mapnik.Image.open('images/support/transparency/white2.png').tostring(format))) - - eq_(t0_len < t1_len < t2_len,True) + t2_len = len(im_in.to_string(format)) + assert len(im.to_string(format)) == len(mapnik.Image.open('images/support/transparency/white2.png').to_string(format)) + assert t0_len < t1_len < t2_len # hextree format = 'png8:m=h:t=0' - im.save(t0,format) + im.save(t0, format) im_in = mapnik.Image.open(t0) - t0_len = len(im_in.tostring(format)) - eq_(t0_len,len(mapnik.Image.open('images/support/transparency/white0.png').tostring(format))) + t0_len = len(im_in.to_string(format)) + assert t0_len == len(mapnik.Image.open('images/support/transparency/white0.png').to_string(format)) format = 'png8:m=h:t=1' - im.save(t1,format) + im.save(t1, format) im_in = mapnik.Image.open(t1) - t1_len = len(im_in.tostring(format)) - eq_(len(im.tostring(format)),len(mapnik.Image.open('images/support/transparency/white1.png').tostring(format))) + t1_len = len(im_in.to_string(format)) + assert len(im.to_string(format)) == len(mapnik.Image.open('images/support/transparency/white1.png').to_string(format)) format = 'png8:m=h:t=2' - im.save(t2,format) + im.save(t2, format) im_in = mapnik.Image.open(t2) - t2_len = len(im_in.tostring(format)) - eq_(len(im.tostring(format)),len(mapnik.Image.open('images/support/transparency/white2.png').tostring(format))) - - eq_(t0_len < t1_len < t2_len,True) + t2_len = len(im_in.to_string(format)) + assert len(im.to_string(format)) == len(mapnik.Image.open('images/support/transparency/white2.png').to_string(format)) + assert t0_len < t1_len < t2_len def test_transparency_levels_aerial(): im = mapnik.Image.open('../data/images/12_654_1580.png') - im_in = mapnik.Image.open('./images/support/transparency/aerial_rgba.png') - eq_(len(im.tostring('png8')),len(im_in.tostring('png8'))) - eq_(len(im.tostring('png32')),len(im_in.tostring('png32'))) - - im_in = mapnik.Image.open('./images/support/transparency/aerial_rgb.png') - eq_(len(im.tostring('png32')),len(im_in.tostring('png32'))) - eq_(len(im.tostring('png32:t=0')),len(im_in.tostring('png32:t=0'))) - eq_(len(im.tostring('png32:t=0')) == len(im_in.tostring('png32')), False) - eq_(len(im.tostring('png8')),len(im_in.tostring('png8'))) - eq_(len(im.tostring('png8:t=0')),len(im_in.tostring('png8:t=0'))) - # unlike png32 paletted images without alpha will look the same even if no alpha is forced - eq_(len(im.tostring('png8:t=0')) == len(im_in.tostring('png8')), True) - eq_(len(im.tostring('png8:t=0:m=o')) == len(im_in.tostring('png8:m=o')), True) + im_in = mapnik.Image.open( + './images/support/transparency/aerial_rgba.png') + assert len(im.to_string('png8')) == len(im_in.to_string('png8')) + assert len(im.to_string('png32')) == len(im_in.to_string('png32')) + + im_in = mapnik.Image.open( + './images/support/transparency/aerial_rgb.png') + assert len(im.to_string('png32')) == len(im_in.to_string('png32')) + assert len(im.to_string('png32:t=0')) == len(im_in.to_string('png32:t=0')) + assert not len(im.to_string('png32:t=0')) == len(im_in.to_string('png32')) + assert len(im.to_string('png8')) == len(im_in.to_string('png8')) + assert len(im.to_string('png8:t=0')) == len(im_in.to_string('png8:t=0')) + # unlike png32 paletted images without alpha will look the same even if + # no alpha is forced + assert len(im.to_string('png8:t=0')) == len(im_in.to_string('png8')) + assert 
len(im.to_string('png8:t=0:m=o')) == len(im_in.to_string('png8:m=o')) def test_9_colors_hextree(): expected = './images/support/encoding-opts/png8-9cols.png' im = mapnik.Image.open(expected) t0 = tmp_dir + 'png-encoding-9-colors.result-hextree.png' im.save(t0, 'png8:m=h') - eq_(mapnik.Image.open(t0).tostring(), - mapnik.Image.open(expected).tostring(), - '%s (actual) not == to %s (expected)' % (t0, expected)) + assert mapnik.Image.open(t0).to_string() == mapnik.Image.open(expected).to_string(), '%s (actual) not == to %s (expected)' % (t0, expected) def test_9_colors_octree(): expected = './images/support/encoding-opts/png8-9cols.png' im = mapnik.Image.open(expected) t0 = tmp_dir + 'png-encoding-9-colors.result-octree.png' im.save(t0, 'png8:m=o') - eq_(mapnik.Image.open(t0).tostring(), - mapnik.Image.open(expected).tostring(), - '%s (actual) not == to %s (expected)' % (t0, expected)) + assert mapnik.Image.open(t0).to_string() == mapnik.Image.open(expected).to_string(), '%s (actual) not == to %s (expected)' % (t0, expected) def test_17_colors_hextree(): expected = './images/support/encoding-opts/png8-17cols.png' im = mapnik.Image.open(expected) t0 = tmp_dir + 'png-encoding-17-colors.result-hextree.png' im.save(t0, 'png8:m=h') - eq_(mapnik.Image.open(t0).tostring(), - mapnik.Image.open(expected).tostring(), - '%s (actual) not == to %s (expected)' % (t0, expected)) + assert mapnik.Image.open(t0).to_string() == mapnik.Image.open(expected).to_string(), '%s (actual) not == to %s (expected)' % (t0, expected) def test_17_colors_octree(): expected = './images/support/encoding-opts/png8-17cols.png' im = mapnik.Image.open(expected) t0 = tmp_dir + 'png-encoding-17-colors.result-octree.png' im.save(t0, 'png8:m=o') - eq_(mapnik.Image.open(t0).tostring(), - mapnik.Image.open(expected).tostring(), - '%s (actual) not == to %s (expected)' % (t0, expected)) + assert mapnik.Image.open(t0).to_string() == mapnik.Image.open(expected).to_string(), '%s (actual) not == to %s (expected)' % (t0, expected) def test_2px_regression_hextree(): im = mapnik.Image.open('./images/support/encoding-opts/png8-2px.A.png') @@ -199,20 +185,11 @@ def test_2px_regression_hextree(): t0 = tmp_dir + 'png-encoding-2px.result-hextree.png' im.save(t0, 'png8:m=h') - eq_(mapnik.Image.open(t0).tostring(), - mapnik.Image.open(expected).tostring(), - '%s (actual) not == to %s (expected)' % (t0, expected)) + assert mapnik.Image.open(t0).to_string() == mapnik.Image.open(expected).to_string(), '%s (actual) not == to %s (expected)' % (t0, expected) def test_2px_regression_octree(): im = mapnik.Image.open('./images/support/encoding-opts/png8-2px.A.png') expected = './images/support/encoding-opts/png8-2px.png' t0 = tmp_dir + 'png-encoding-2px.result-octree.png' im.save(t0, 'png8:m=o') - eq_(mapnik.Image.open(t0).tostring(), - mapnik.Image.open(expected).tostring(), - '%s (actual) not == to %s (expected)' % (t0, expected)) - - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert mapnik.Image.open(t0).to_string() == mapnik.Image.open(expected).to_string(), '%s (actual) not == to %s (expected)' % (t0, expected) diff --git a/test/python_tests/pngsuite_test.py b/test/python_tests/pngsuite_test.py index 4c933eb2d..8c91e27ce 100644 --- a/test/python_tests/pngsuite_test.py +++ b/test/python_tests/pngsuite_test.py @@ -1,35 +1,37 @@ -#!/usr/bin/env python - import os import mapnik -from nose.tools import assert_raises -from utilities import execution_path, run_all +import pytest +from .utilities import 
execution_path datadir = '../data/pngsuite' +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield def assert_broken_file(fname): - assert_raises(RuntimeError, lambda: mapnik.Image.open(fname)) + with pytest.raises(RuntimeError): + mapnik.Image.open(fname) + def assert_good_file(fname): assert mapnik.Image.open(fname) + def get_pngs(good): - files = [ x for x in os.listdir(datadir) if x.endswith('.png') ] - return [ os.path.join(datadir, x) for x in files if good != x.startswith('x') ] + files = [x for x in os.listdir(datadir) if x.endswith('.png')] + return [os.path.join(datadir, x) + for x in files if good != x.startswith('x')] -def test_good_pngs(): + +def test_good_pngs(setup): for x in get_pngs(True): - yield assert_good_file, x + assert_good_file, x + def test_broken_pngs(): for x in get_pngs(False): - yield assert_broken_file, x - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert_broken_file, x diff --git a/test/python_tests/postgis_test.py b/test/python_tests/postgis_test.py index 42e40cc0d..b17f41950 100644 --- a/test/python_tests/postgis_test.py +++ b/test/python_tests/postgis_test.py @@ -1,32 +1,27 @@ -#!/usr/bin/env python - -from nose.tools import eq_,raises import atexit -from utilities import execution_path, run_all -from subprocess import Popen, PIPE -import os, mapnik +import os +import sys import threading - +from subprocess import PIPE, Popen +import mapnik +import pytest +from .utilities import execution_path MAPNIK_TEST_DBNAME = 'mapnik-tmp-postgis-test-db' POSTGIS_TEMPLATE_DBNAME = 'template_postgis' -SHAPEFILE = os.path.join(execution_path('.'),'../data/shp/world_merc.shp') +SHAPEFILE = os.path.join(execution_path('.'), '../data/shp/world_merc.shp') -def setup(): - # All of the paths used are relative, if we run the tests - # from another directory we need to chdir() - os.chdir(execution_path('.')) - -def call(cmd,silent=False): - stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate() +def call(cmd, silent=False): + stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, + stderr=PIPE).communicate() if not stderr: return stdin.strip() - elif not silent and 'error' in stderr.lower() \ - or 'not found' in stderr.lower() \ - or 'could not connect' in stderr.lower() \ - or 'bad connection' in stderr.lower() \ - or 'not recognized as an internal' in stderr.lower(): - raise RuntimeError(stderr.strip()) + msg = str(stderr).lower() + if not silent and 'error' in msg\ + or 'not found' in msg or 'not recognized as an internal' in msg\ + or 'bad connection' in msg or 'could not connect' in msg: + raise RuntimeError(msg.strip()) + def psql_can_connect(): """Test ability to connect to a postgis template db with no options. @@ -40,9 +35,10 @@ def psql_can_connect(): call('psql %s -c "select postgis_version()"' % POSTGIS_TEMPLATE_DBNAME) return True except RuntimeError: - print 'Notice: skipping postgis tests (connection)' + print('Notice: skipping postgis tests (connection)') return False + def shp2pgsql_on_path(): """Test for presence of shp2pgsql on the user path. @@ -52,9 +48,10 @@ def shp2pgsql_on_path(): call('shp2pgsql') return True except RuntimeError: - print 'Notice: skipping postgis tests (shp2pgsql)' + print('Notice: skipping postgis tests (shp2pgsql)') return False + def createdb_and_dropdb_on_path(): """Test for presence of dropdb/createdb on user path. 
@@ -65,7 +62,7 @@ def createdb_and_dropdb_on_path(): call('dropdb --help') return True except RuntimeError: - print 'Notice: skipping postgis tests (createdb/dropdb)' + print('Notice: skipping postgis tests (createdb/dropdb)') return False insert_table_1 = """ @@ -196,23 +193,85 @@ def createdb_and_dropdb_on_path(): def postgis_setup(): - call('dropdb %s' % MAPNIK_TEST_DBNAME,silent=True) - call('createdb -T %s %s' % (POSTGIS_TEMPLATE_DBNAME,MAPNIK_TEST_DBNAME),silent=False) - call('shp2pgsql -s 3857 -g geom -W LATIN1 %s world_merc | psql -q %s' % (SHAPEFILE,MAPNIK_TEST_DBNAME), silent=True) - call('''psql -q %s -c "CREATE TABLE \"empty\" (key serial);SELECT AddGeometryColumn('','empty','geom','-1','GEOMETRY',4);"''' % MAPNIK_TEST_DBNAME,silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_1),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_2),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_3),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_4),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_5),silent=False) - call("""psql -q %s -c '%s'""" % (MAPNIK_TEST_DBNAME,insert_table_5b),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_6),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_7),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_8),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_9),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_10),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_11),silent=False) - call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_12),silent=False) + call('dropdb %s' % MAPNIK_TEST_DBNAME, silent=True) + call( + 'createdb -T %s %s' % + (POSTGIS_TEMPLATE_DBNAME, + MAPNIK_TEST_DBNAME), + silent=False) + + call('''shp2pgsql -s 3857 -g geom -W LATIN1 %s world_merc | psql -q %s''' % (SHAPEFILE, MAPNIK_TEST_DBNAME), silent=False) + + call( + '''psql -q %s -c "CREATE TABLE \"empty\" (key serial);SELECT AddGeometryColumn('','empty','geom','-1','GEOMETRY',4);"''' % + MAPNIK_TEST_DBNAME, + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_1), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_2), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_3), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_4), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_5), + silent=False) + call( + """psql -q %s -c '%s'""" % + (MAPNIK_TEST_DBNAME, + insert_table_5b), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_6), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_7), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_8), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_9), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_10), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_11), + silent=False) + call( + '''psql -q %s -c "%s"''' % + (MAPNIK_TEST_DBNAME, + insert_table_12), + silent=False) + 
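(Aside: the twelve near-identical `call('''psql -q %s -c "%s"''' % ...)` invocations that `postgis_setup()` gains in the hunk above could equivalently be written as a loop. A minimal sketch, not part of the upstream change, assuming the `insert_table_1` … `insert_table_12` and `insert_table_5b` SQL strings, `MAPNIK_TEST_DBNAME`, and the `call()` helper defined earlier in this test module:)

```python
# Equivalent, less repetitive form of the per-table psql calls in
# postgis_setup(); insert_table_1..insert_table_12 and insert_table_5b
# are the SQL snippets defined earlier in this file, call() is the
# subprocess helper shown above. Original ordering is preserved.
for sql in (insert_table_1, insert_table_2, insert_table_3,
            insert_table_4, insert_table_5):
    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME, sql), silent=False)

# insert_table_5b is quoted differently in the original (single quotes
# around %s instead of double quotes), so it keeps its own call.
call("""psql -q %s -c '%s'""" % (MAPNIK_TEST_DBNAME, insert_table_5b),
     silent=False)

for sql in (insert_table_6, insert_table_7, insert_table_8, insert_table_9,
            insert_table_10, insert_table_11, insert_table_12):
    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME, sql), silent=False)
```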
def postgis_takedown(): pass @@ -228,60 +287,64 @@ def postgis_takedown(): postgis_setup() def test_feature(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='world_merc') - fs = ds.featureset() - feature = fs.next() - eq_(feature['gid'],1) - eq_(feature['fips'],u'AC') - eq_(feature['iso2'],u'AG') - eq_(feature['iso3'],u'ATG') - eq_(feature['un'],28) - eq_(feature['name'],u'Antigua and Barbuda') - eq_(feature['area'],44) - eq_(feature['pop2005'],83039) - eq_(feature['region'],19) - eq_(feature['subregion'],29) - eq_(feature['lon'],-61.783) - eq_(feature['lat'],17.078) + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='world_merc') + fs = iter(ds) + feature = next(fs) + assert feature['gid'] == 1 + assert feature['fips'] == u'AC' + assert feature['iso2'] == u'AG' + assert feature['iso3'] == u'ATG' + assert feature['un'] == 28 + assert feature['name'] == u'Antigua and Barbuda' + assert feature['area'] == 44 + assert feature['pop2005'] == 83039 + assert feature['region'] == 19 + assert feature['subregion'] == 29 + assert feature['lon'] == -61.783 + assert feature['lat'] == 17.078 meta = ds.describe() - eq_(meta['srid'],3857) - eq_(meta.get('key_field'),None) - eq_(meta['encoding'],u'UTF8') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon) + assert meta['srid'] == 3857 + assert meta.get('key_field') == None + assert meta['encoding'] == u'UTF8' + assert meta['geometry_type'] == mapnik.DataGeometryType.Polygon def test_subquery(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='(select * from world_merc) as w') - fs = ds.featureset() - feature = fs.next() - eq_(feature['gid'],1) - eq_(feature['fips'],u'AC') - eq_(feature['iso2'],u'AG') - eq_(feature['iso3'],u'ATG') - eq_(feature['un'],28) - eq_(feature['name'],u'Antigua and Barbuda') - eq_(feature['area'],44) - eq_(feature['pop2005'],83039) - eq_(feature['region'],19) - eq_(feature['subregion'],29) - eq_(feature['lon'],-61.783) - eq_(feature['lat'],17.078) + ds = mapnik.PostGIS( + dbname=MAPNIK_TEST_DBNAME, + table='(select * from world_merc) as w') + fs = iter(ds) + feature = next(fs) + assert feature['gid'] == 1 + assert feature['fips'] == u'AC' + assert feature['iso2'] == u'AG' + assert feature['iso3'] == u'ATG' + assert feature['un'] == 28 + assert feature['name'] == u'Antigua and Barbuda' + assert feature['area'] == 44 + assert feature['pop2005'] == 83039 + assert feature['region'] == 19 + assert feature['subregion'] == 29 + assert feature['lon'] == -61.783 + assert feature['lat'] == 17.078 meta = ds.describe() - eq_(meta['srid'],3857) - eq_(meta.get('key_field'),None) - eq_(meta['encoding'],u'UTF8') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon) - - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='(select gid,geom,fips as _fips from world_merc) as w') - fs = ds.featureset() - feature = fs.next() - eq_(feature['gid'],1) - eq_(feature['_fips'],u'AC') - eq_(len(feature),2) + assert meta['srid'] == 3857 + assert meta.get('key_field') == None + assert meta['encoding'] == u'UTF8' + assert meta['geometry_type'] == mapnik.DataGeometryType.Polygon + + ds = mapnik.PostGIS( + dbname=MAPNIK_TEST_DBNAME, + table='(select gid,geom,fips as _fips from world_merc) as w') + fs = iter(ds) + feature = next(fs) + assert feature['gid'] == 1 + assert feature['_fips'] == u'AC' + assert len(feature) == 2 meta = ds.describe() - eq_(meta['srid'],3857) - eq_(meta.get('key_field'),None) - eq_(meta['encoding'],u'UTF8') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon) + assert meta['srid'] == 3857 + assert 
meta.get('key_field') == None + assert meta['encoding'] == u'UTF8' + assert meta['geometry_type'] == mapnik.DataGeometryType.Polygon def test_bad_connection(): try: @@ -290,344 +353,379 @@ def test_bad_connection(): max_size=20, geometry_field='geom', user="rolethatdoesnotexist") - except Exception, e: - assert 'role "rolethatdoesnotexist" does not exist' in str(e) + except Exception as e: + assert 'role "rolethatdoesnotexist" does not exist' in str(e) or \ + 'authentication failed for user "rolethatdoesnotexist"' in str(e) def test_empty_db(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='empty') - fs = ds.featureset() + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='empty') + fs = ds.features(mapnik.Query(mapnik.Box2d(-180,-90,180,90))) feature = None try: - feature = fs.next() + feature = next(fs) except StopIteration: pass - eq_(feature,None) + assert feature == None meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta.get('key_field'),None) - eq_(meta['encoding'],u'UTF8') - eq_(meta['geometry_type'],None) + assert meta['srid'] == -1 + assert meta.get('key_field') == None + assert meta['encoding'] == u'UTF8' + assert meta['geometry_type'] == None def test_manual_srid(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,srid=99, table='empty') - fs = ds.featureset() + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, srid=99, table='empty') + fs = ds.features(mapnik.Query(mapnik.Box2d(-180,-90,180,90))) feature = None try: - feature = fs.next() + feature = next(fs) except StopIteration: pass - eq_(feature,None) + assert feature == None meta = ds.describe() - eq_(meta['srid'],99) - eq_(meta.get('key_field'),None) - eq_(meta['encoding'],u'UTF8') - eq_(meta['geometry_type'],None) + assert meta['srid'] == 99 + assert meta.get('key_field') == None + assert meta['encoding'] == u'UTF8' + assert meta['geometry_type'] == None def test_geometry_detection(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test', geometry_field='geom') meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection) + assert meta['srid'] == 4326 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Collection # will fail with postgis 2.0 because it automatically adds a geometry_columns entry - #ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test', + # ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test', # geometry_field='geom', # row_limit=1) - #eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point) + # assert ds.describe()['geometry_type'] == mapnik.DataGeometryType.Point + - @raises(RuntimeError) def test_that_nonexistant_query_field_throws(**kwargs): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='empty') - eq_(len(ds.fields()),1) - eq_(ds.fields(),['key']) - eq_(ds.field_types(),['int']) + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='empty') + assert len(ds.fields()) == 1 + assert ds.fields() == ['key'] + assert ds.field_types() == ['int'] query = mapnik.Query(ds.envelope()) + for fld in ds.fields(): query.add_property_name(fld) - # also add an invalid one, triggering throw - query.add_property_name('bogus') - ds.features(query) + # also add an invalid one, triggering throw + query.add_property_name('bogus') + with pytest.raises(RuntimeError): + ds.features(query) def test_auto_detection_of_unique_feature_id_32_bit(): - ds = 
mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test2', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test2', geometry_field='geom', autodetect_key_field=True) - fs = ds.featureset() - eq_(fs.next()['manual_id'],0) - eq_(fs.next()['manual_id'],1) - eq_(fs.next()['manual_id'],1000) - eq_(fs.next()['manual_id'],-1000) - eq_(fs.next()['manual_id'],2147483647) - eq_(fs.next()['manual_id'],-2147483648) - - fs = ds.featureset() - eq_(fs.next().id(),0) - eq_(fs.next().id(),1) - eq_(fs.next().id(),1000) - eq_(fs.next().id(),-1000) - eq_(fs.next().id(),2147483647) - eq_(fs.next().id(),-2147483648) + fs = iter(ds) + f = next(fs) + assert len(ds.fields()) == len(f.attributes) + assert f['manual_id'] == 0 + assert next(fs)['manual_id'] == 1 + assert next(fs)['manual_id'] == 1000 + assert next(fs)['manual_id'] == -1000 + assert next(fs)['manual_id'] == 2147483647 + assert next(fs)['manual_id'] == -2147483648 + + fs = iter(ds) + assert next(fs).id() == 0 + assert next(fs).id() == 1 + assert next(fs).id() == 1000 + assert next(fs).id() == -1000 + assert next(fs).id() == 2147483647 + assert next(fs).id() == -2147483648 + meta = ds.describe() + assert meta['srid'] == 4326 + assert meta.get('key_field') == u'manual_id' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + + def test_auto_detection_of_unique_feature_id_32_bit_no_attribute(): + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test2', + geometry_field='geom', + autodetect_key_field=True, + key_field_as_attribute=False) + fs = iter(ds) + f = next(fs) + assert len(ds.fields()) == len(f.attributes) + assert len(ds.fields()) == 0 + assert len(f.attributes) == 0 + assert f.id() == 0 + assert next(fs).id() == 1 + assert next(fs).id() == 1000 + assert next(fs).id() == -1000 + assert next(fs).id() == 2147483647 + assert next(fs).id() == -2147483648 meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),u'manual_id') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == 4326 + assert meta.get('key_field') == u'manual_id' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_auto_detection_will_fail_since_no_primary_key(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test3', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test3', geometry_field='geom', autodetect_key_field=False) - fs = ds.featureset() - feat = fs.next() - eq_(feat['manual_id'],0) - # will fail: https://github.com/mapnik/mapnik/issues/895 - #eq_(feat['non_id'],9223372036854775807) - eq_(fs.next()['manual_id'],1) - eq_(fs.next()['manual_id'],1000) - eq_(fs.next()['manual_id'],-1000) - eq_(fs.next()['manual_id'],2147483647) - eq_(fs.next()['manual_id'],-2147483648) + fs = iter(ds) + feat = next(fs) + assert feat['manual_id'] == 0 + assert feat['non_id'] == 9223372036854775807 + assert next(fs)['manual_id'] == 1 + assert next(fs)['manual_id'] == 1000 + assert next(fs)['manual_id'] == -1000 + assert next(fs)['manual_id'] == 2147483647 + assert next(fs)['manual_id'] == -2147483648 # since no valid primary key will be detected the fallback # is auto-incrementing counter - fs = ds.featureset() - eq_(fs.next().id(),1) - eq_(fs.next().id(),2) - eq_(fs.next().id(),3) - eq_(fs.next().id(),4) - eq_(fs.next().id(),5) - eq_(fs.next().id(),6) + fs = iter(ds) + assert next(fs).id() == 1 + assert next(fs).id() == 2 + assert next(fs).id() == 3 + assert next(fs).id() == 4 + assert next(fs).id() == 5 + assert next(fs).id() == 6 meta = ds.describe() - eq_(meta['srid'],4326) - 
eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == 4326 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + - @raises(RuntimeError) def test_auto_detection_will_fail_and_should_throw(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test3', - geometry_field='geom', - autodetect_key_field=True) - ds.featureset() + with pytest.raises(RuntimeError): + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test3', + geometry_field='geom', + autodetect_key_field=True) + iter(ds) def test_auto_detection_of_unique_feature_id_64_bit(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test4', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test4', geometry_field='geom', autodetect_key_field=True) - fs = ds.featureset() - eq_(fs.next()['manual_id'],0) - eq_(fs.next()['manual_id'],1) - eq_(fs.next()['manual_id'],1000) - eq_(fs.next()['manual_id'],-1000) - eq_(fs.next()['manual_id'],2147483647) - eq_(fs.next()['manual_id'],-2147483648) - - fs = ds.featureset() - eq_(fs.next().id(),0) - eq_(fs.next().id(),1) - eq_(fs.next().id(),1000) - eq_(fs.next().id(),-1000) - eq_(fs.next().id(),2147483647) - eq_(fs.next().id(),-2147483648) + fs = iter(ds) + f = next(fs) + assert len(ds.fields()) == len(f.attributes) + assert f['manual_id'] == 0 + assert next(fs)['manual_id'] == 1 + assert next(fs)['manual_id'] == 1000 + assert next(fs)['manual_id'] == -1000 + assert next(fs)['manual_id'] == 2147483647 + assert next(fs)['manual_id'] == -2147483648 + + fs = iter(ds) + assert next(fs).id() == 0 + assert next(fs).id() == 1 + assert next(fs).id() == 1000 + assert next(fs).id() == -1000 + assert next(fs).id() == 2147483647 + assert next(fs).id() == -2147483648 meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),u'manual_id') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == 4326 + assert meta.get('key_field') == u'manual_id' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_disabled_auto_detection_and_subquery(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''(select geom, 'a'::varchar as name from test2) as t''', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, 'a'::varchar as name from test2) as t''', geometry_field='geom', autodetect_key_field=False) - fs = ds.featureset() - feat = fs.next() - eq_(feat.id(),1) - eq_(feat['name'],'a') - feat = fs.next() - eq_(feat.id(),2) - eq_(feat['name'],'a') - feat = fs.next() - eq_(feat.id(),3) - eq_(feat['name'],'a') - feat = fs.next() - eq_(feat.id(),4) - eq_(feat['name'],'a') - feat = fs.next() - eq_(feat.id(),5) - eq_(feat['name'],'a') - feat = fs.next() - eq_(feat.id(),6) - eq_(feat['name'],'a') + fs = iter(ds) + feat = next(fs) + assert feat.id() == 1 + assert feat['name'] == 'a' + feat = next(fs) + assert feat.id() == 2 + assert feat['name'] == 'a' + feat = next(fs) + assert feat.id() == 3 + assert feat['name'] == 'a' + feat = next(fs) + assert feat.id() == 4 + assert feat['name'] == 'a' + feat = next(fs) + assert feat.id() == 5 + assert feat['name'] == 'a' + feat = next(fs) + assert feat.id() == 6 + assert feat['name'] == 'a' meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == 4326 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def 
test_auto_detection_and_subquery_including_key(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''(select geom, manual_id from test2) as t''', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, manual_id from test2) as t''', geometry_field='geom', autodetect_key_field=True) - fs = ds.featureset() - eq_(fs.next()['manual_id'],0) - eq_(fs.next()['manual_id'],1) - eq_(fs.next()['manual_id'],1000) - eq_(fs.next()['manual_id'],-1000) - eq_(fs.next()['manual_id'],2147483647) - eq_(fs.next()['manual_id'],-2147483648) - - fs = ds.featureset() - eq_(fs.next().id(),0) - eq_(fs.next().id(),1) - eq_(fs.next().id(),1000) - eq_(fs.next().id(),-1000) - eq_(fs.next().id(),2147483647) - eq_(fs.next().id(),-2147483648) + fs = iter(ds) + f = next(fs) + assert len(ds.fields()) == len(f.attributes) + assert f['manual_id'] == 0 + assert next(fs)['manual_id'] == 1 + assert next(fs)['manual_id'] == 1000 + assert next(fs)['manual_id'] == -1000 + assert next(fs)['manual_id'] == 2147483647 + assert next(fs)['manual_id'] == -2147483648 + + fs = iter(ds) + assert next(fs).id() == 0 + assert next(fs).id() == 1 + assert next(fs).id() == 1000 + assert next(fs).id() == -1000 + assert next(fs).id() == 2147483647 + assert next(fs).id() == -2147483648 meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),u'manual_id') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == 4326 + assert meta.get('key_field') == u'manual_id' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + - @raises(RuntimeError) def test_auto_detection_of_invalid_numeric_primary_key(): - mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''(select geom, manual_id::numeric from test2) as t''', - geometry_field='geom', - autodetect_key_field=True) + with pytest.raises(RuntimeError): + mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, manual_id::numeric from test2) as t''', + geometry_field='geom', + autodetect_key_field=True) + - @raises(RuntimeError) def test_auto_detection_of_invalid_multiple_keys(): - mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''test6''', - geometry_field='geom', - autodetect_key_field=True) + with pytest.raises(RuntimeError): + mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''test6''', + geometry_field='geom', + autodetect_key_field=True) + - @raises(RuntimeError) def test_auto_detection_of_invalid_multiple_keys_subquery(): - mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''(select first_id,second_id,geom from test6) as t''', - geometry_field='geom', - autodetect_key_field=True) + with pytest.raises(RuntimeError): + mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select first_id,second_id,geom from test6) as t''', + geometry_field='geom', + autodetect_key_field=True) def test_manually_specified_feature_id_field(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test4', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test4', geometry_field='geom', key_field='manual_id', autodetect_key_field=True) - fs = ds.featureset() - eq_(fs.next()['manual_id'],0) - eq_(fs.next()['manual_id'],1) - eq_(fs.next()['manual_id'],1000) - eq_(fs.next()['manual_id'],-1000) - eq_(fs.next()['manual_id'],2147483647) - eq_(fs.next()['manual_id'],-2147483648) - - fs = ds.featureset() - eq_(fs.next().id(),0) - eq_(fs.next().id(),1) - eq_(fs.next().id(),1000) - eq_(fs.next().id(),-1000) - eq_(fs.next().id(),2147483647) - eq_(fs.next().id(),-2147483648) + fs = iter(ds) + f = next(fs) + assert len(ds.fields()) == len(f.attributes) + assert 
f['manual_id'] == 0 + assert next(fs)['manual_id'] == 1 + assert next(fs)['manual_id'] == 1000 + assert next(fs)['manual_id'] == -1000 + assert next(fs)['manual_id'] == 2147483647 + assert next(fs)['manual_id'] == -2147483648 + + fs = iter(ds) + assert next(fs).id() == 0 + assert next(fs).id() == 1 + assert next(fs).id() == 1000 + assert next(fs).id() == -1000 + assert next(fs).id() == 2147483647 + assert next(fs).id() == -2147483648 meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),u'manual_id') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == 4326 + assert meta.get('key_field') == u'manual_id' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_numeric_type_feature_id_field(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test5', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test5', geometry_field='geom', autodetect_key_field=False) - fs = ds.featureset() - eq_(fs.next()['manual_id'],-1) - eq_(fs.next()['manual_id'],1) + fs = iter(ds) + assert next(fs)['manual_id'] == -1 + assert next(fs)['manual_id'] == 1 - fs = ds.featureset() - eq_(fs.next().id(),1) - eq_(fs.next().id(),2) + fs = iter(ds) + assert next(fs).id() == 1 + assert next(fs).id() == 2 meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == 4326 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_querying_table_with_mixed_case(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='"tableWithMixedCase"', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='"tableWithMixedCase"', geometry_field='geom', autodetect_key_field=True) - fs = ds.featureset() - for id in range(1,5): - eq_(fs.next().id(),id) + fs = iter(ds) + for id in range(1, 5): + assert next(fs).id() == id meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta.get('key_field'),u'gid') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == -1 + assert meta.get('key_field') == u'gid' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_querying_subquery_with_mixed_case(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='(SeLeCt * FrOm "tableWithMixedCase") as MixedCaseQuery', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='(SeLeCt * FrOm "tableWithMixedCase") as MixedCaseQuery', geometry_field='geom', autodetect_key_field=True) - fs = ds.featureset() - for id in range(1,5): - eq_(fs.next().id(),id) + fs = iter(ds) + for id in range(1, 5): + assert next(fs).id() == id meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta.get('key_field'),u'gid') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == -1 + assert meta.get('key_field') == u'gid' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_bbox_token_in_subquery1(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table=''' + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table=''' (SeLeCt * FrOm "tableWithMixedCase" where geom && !bbox! 
) as MixedCaseQuery''', geometry_field='geom', autodetect_key_field=True) - fs = ds.featureset() - for id in range(1,5): - eq_(fs.next().id(),id) + fs = iter(ds) + for id in range(1, 5): + assert next(fs).id() == id meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta.get('key_field'),u'gid') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == -1 + assert meta.get('key_field') == u'gid' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_bbox_token_in_subquery2(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table=''' + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table=''' (SeLeCt * FrOm "tableWithMixedCase" where ST_Intersects(geom,!bbox!) ) as MixedCaseQuery''', geometry_field='geom', autodetect_key_field=True) - fs = ds.featureset() - for id in range(1,5): - eq_(fs.next().id(),id) + fs = iter(ds) + for id in range(1, 5): + assert next(fs).id() == id meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta.get('key_field'),u'gid') - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == -1 + assert meta.get('key_field') == u'gid' + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_empty_geom(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test7', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test7', geometry_field='geom') - fs = ds.featureset() - eq_(fs.next()['gid'],1) + fs = iter(ds) + assert next(fs)['gid'] == 1 meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection) + assert meta['srid'] == 4326 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Collection def create_ds(): ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test', max_size=20, geometry_field='geom') - fs = ds.all_features() - eq_(len(fs),8) + fs = list(iter(ds)) + assert len(fs) == 8 meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection) + assert meta['srid'] == 4326 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Collection def test_threaded_create(NUM_THREADS=100): # run one to start before thread loop @@ -639,17 +737,17 @@ def test_threaded_create(NUM_THREADS=100): t = threading.Thread(target=create_ds) t.start() t.join() - runs +=1 - eq_(runs,NUM_THREADS) + runs += 1 + assert runs == NUM_THREADS def create_ds_and_error(): try: ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='asdfasdfasdfasdfasdf', max_size=20) - ds.all_features() - except Exception, e: - eq_('in executeQuery' in str(e),True) + iter(ds) + except Exception as e: + assert 'in executeQuery' in str(e) def test_threaded_create2(NUM_THREADS=10): for i in range(NUM_THREADS): @@ -661,23 +759,23 @@ def test_that_64bit_int_fields_work(): ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test8', geometry_field='geom') - eq_(len(ds.fields()),2) - eq_(ds.fields(),['gid','int_field']) - eq_(ds.field_types(),['int','int']) - fs = ds.featureset() - feat = fs.next() - eq_(feat.id(),1) - eq_(feat['gid'],1) - eq_(feat['int_field'],2147483648) - feat = fs.next() - eq_(feat.id(),2) - eq_(feat['gid'],2) - eq_(feat['int_field'],922337203685477580) + assert len(ds.fields()) == 2 + assert ds.fields(), ['gid' == 'int_field'] + assert ds.field_types(), ['int' == 'int'] + fs = iter(ds) + feat = next(fs) + assert feat.id() == 1 + assert feat['gid'] == 1 + assert 
feat['int_field'] == 2147483648 + feat = next(fs) + assert feat.id() == 2 + assert feat['gid'] == 2 + assert feat['int_field'] == 922337203685477580 meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == -1 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_persist_connection_off(): # NOTE: max_size should be equal or greater than @@ -686,452 +784,465 @@ def test_persist_connection_off(): # default is 20, so we use that value. See # http://github.com/mapnik/mapnik/issues/863 max_size = 20 - for i in range(0, max_size+1): + for i in range(0, max_size + 1): ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, - max_size=1, # unused - persist_connection=False, - table='(select ST_MakePoint(0,0) as g, pg_backend_pid() as p, 1 as v) as w', - geometry_field='g') - fs = ds.featureset() - eq_(fs.next()['v'], 1) + max_size=1, # unused + persist_connection=False, + table='(select ST_MakePoint(0,0) as g, pg_backend_pid() as p, 1 as v) as w', + geometry_field='g') + fs = iter(ds) + assert next(fs)['v'] == 1 meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == -1 + assert meta['geometry_type'] == mapnik.DataGeometryType.Point def test_null_comparision(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test9', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test9', geometry_field='geom') - fs = ds.featureset() - feat = fs.next() + fs = iter(ds) + feat = next(fs) meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) - - eq_(feat['gid'],1) - eq_(feat['name'],'name') - eq_(mapnik.Expression("[name] = 'name'").evaluate(feat),True) - eq_(mapnik.Expression("[name] = ''").evaluate(feat),False) - eq_(mapnik.Expression("[name] = null").evaluate(feat),False) - eq_(mapnik.Expression("[name] = true").evaluate(feat),False) - eq_(mapnik.Expression("[name] = false").evaluate(feat),False) - eq_(mapnik.Expression("[name] != 'name'").evaluate(feat),False) - eq_(mapnik.Expression("[name] != ''").evaluate(feat),True) - eq_(mapnik.Expression("[name] != null").evaluate(feat),True) - eq_(mapnik.Expression("[name] != true").evaluate(feat),True) - eq_(mapnik.Expression("[name] != false").evaluate(feat),True) - - feat = fs.next() - eq_(feat['gid'],2) - eq_(feat['name'],'') - eq_(mapnik.Expression("[name] = 'name'").evaluate(feat),False) - eq_(mapnik.Expression("[name] = ''").evaluate(feat),True) - eq_(mapnik.Expression("[name] = null").evaluate(feat),False) - eq_(mapnik.Expression("[name] = true").evaluate(feat),False) - eq_(mapnik.Expression("[name] = false").evaluate(feat),False) - eq_(mapnik.Expression("[name] != 'name'").evaluate(feat),True) - eq_(mapnik.Expression("[name] != ''").evaluate(feat),False) - eq_(mapnik.Expression("[name] != null").evaluate(feat),True) - eq_(mapnik.Expression("[name] != true").evaluate(feat),True) - eq_(mapnik.Expression("[name] != false").evaluate(feat),True) - - feat = fs.next() - eq_(feat['gid'],3) - eq_(feat['name'],None) # null - eq_(mapnik.Expression("[name] = 'name'").evaluate(feat),False) - eq_(mapnik.Expression("[name] = ''").evaluate(feat),False) - eq_(mapnik.Expression("[name] = null").evaluate(feat),True) - eq_(mapnik.Expression("[name] = true").evaluate(feat),False) - eq_(mapnik.Expression("[name] = false").evaluate(feat),False) - 
eq_(mapnik.Expression("[name] != 'name'").evaluate(feat),True) + assert meta['srid'] == -1 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + + assert feat['gid'] == 1 + assert feat['name'] == 'name' + assert mapnik.Expression("[name] = 'name'").evaluate(feat) + assert not mapnik.Expression("[name] = ''").evaluate(feat) + assert not mapnik.Expression("[name] = null").evaluate(feat) + assert not mapnik.Expression("[name] = true").evaluate(feat) + assert not mapnik.Expression("[name] = false").evaluate(feat) + assert not mapnik.Expression("[name] != 'name'").evaluate(feat) + assert mapnik.Expression("[name] != ''").evaluate(feat) + assert mapnik.Expression("[name] != null").evaluate(feat) + assert mapnik.Expression("[name] != true").evaluate(feat) + assert mapnik.Expression("[name] != false").evaluate(feat) + + feat = next(fs) + assert feat['gid'] == 2 + assert feat['name'] == '' + assert mapnik.Expression("[name] = 'name'").evaluate(feat) == False + assert mapnik.Expression("[name] = ''").evaluate(feat) == True + assert mapnik.Expression("[name] = null").evaluate(feat) == False + assert mapnik.Expression("[name] = true").evaluate(feat) == False + assert mapnik.Expression("[name] = false").evaluate(feat) == False + assert mapnik.Expression("[name] != 'name'").evaluate(feat) == True + assert mapnik.Expression("[name] != ''").evaluate(feat) == False + assert mapnik.Expression("[name] != null").evaluate(feat) == True + assert mapnik.Expression("[name] != true").evaluate(feat) == True + assert mapnik.Expression("[name] != false").evaluate(feat) == True + + feat = next(fs) + assert feat['gid'] == 3 + assert feat['name'] == None # null + assert mapnik.Expression("[name] = 'name'").evaluate(feat) == False + assert mapnik.Expression("[name] = ''").evaluate(feat) == False + assert mapnik.Expression("[name] = null").evaluate(feat) == True + assert mapnik.Expression("[name] = true").evaluate(feat) == False + assert mapnik.Expression("[name] = false").evaluate(feat) == False + assert mapnik.Expression("[name] != 'name'").evaluate(feat) == True # https://github.com/mapnik/mapnik/issues/1859 - eq_(mapnik.Expression("[name] != ''").evaluate(feat),False) - eq_(mapnik.Expression("[name] != null").evaluate(feat),False) - eq_(mapnik.Expression("[name] != true").evaluate(feat),True) - eq_(mapnik.Expression("[name] != false").evaluate(feat),True) + assert mapnik.Expression("[name] != ''").evaluate(feat) == False + assert mapnik.Expression("[name] != null").evaluate(feat) == False + assert mapnik.Expression("[name] != true").evaluate(feat) == True + assert mapnik.Expression("[name] != false").evaluate(feat) == True def test_null_comparision2(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test10', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test10', geometry_field='geom') - fs = ds.featureset() - feat = fs.next() + fs = iter(ds) + feat = next(fs) meta = ds.describe() - eq_(meta['srid'],-1) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) - - eq_(feat['gid'],1) - eq_(feat['bool_field'],True) - eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = null").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = true").evaluate(feat),True) - eq_(mapnik.Expression("[bool_field] = false").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] != 
'name'").evaluate(feat),True) - eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat),True) # in 2.1.x used to be False - eq_(mapnik.Expression("[bool_field] != null").evaluate(feat),True) # in 2.1.x used to be False - eq_(mapnik.Expression("[bool_field] != true").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] != false").evaluate(feat),True) - - feat = fs.next() - eq_(feat['gid'],2) - eq_(feat['bool_field'],False) - eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = null").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = true").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = false").evaluate(feat),True) - eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat),True) - eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat),True) - eq_(mapnik.Expression("[bool_field] != null").evaluate(feat),True) # in 2.1.x used to be False - eq_(mapnik.Expression("[bool_field] != true").evaluate(feat),True) - eq_(mapnik.Expression("[bool_field] != false").evaluate(feat),False) - - feat = fs.next() - eq_(feat['gid'],3) - eq_(feat['bool_field'],None) # null - eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = null").evaluate(feat),True) - eq_(mapnik.Expression("[bool_field] = true").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] = false").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat),True) # in 2.1.x used to be False + assert meta['srid'] == -1 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + + assert feat['gid'] == 1 + assert feat['bool_field'] + assert not mapnik.Expression("[bool_field] = 'name'").evaluate(feat) + assert not mapnik.Expression("[bool_field] = ''").evaluate(feat) + assert not mapnik.Expression("[bool_field] = null").evaluate(feat) + assert mapnik.Expression("[bool_field] = true").evaluate(feat) + assert not mapnik.Expression("[bool_field] = false").evaluate(feat) + assert mapnik.Expression("[bool_field] != 'name'").evaluate(feat) + assert mapnik.Expression("[bool_field] != ''").evaluate(feat) # in 2.1.x used to be False + assert mapnik.Expression("[bool_field] != null").evaluate(feat) # in 2.1.x used to be False + assert not mapnik.Expression("[bool_field] != true").evaluate(feat) + assert mapnik.Expression("[bool_field] != false").evaluate(feat) + + feat = next(fs) + assert feat['gid'] == 2 + assert not feat['bool_field'] + assert not mapnik.Expression("[bool_field] = 'name'").evaluate(feat) + assert not mapnik.Expression("[bool_field] = ''").evaluate(feat) + assert not mapnik.Expression("[bool_field] = null").evaluate(feat) + assert not mapnik.Expression("[bool_field] = true").evaluate(feat) + assert mapnik.Expression("[bool_field] = false").evaluate(feat) + assert mapnik.Expression("[bool_field] != 'name'").evaluate(feat) + assert mapnik.Expression("[bool_field] != ''").evaluate(feat) + assert mapnik.Expression("[bool_field] != null").evaluate(feat) # in 2.1.x used to be False + assert mapnik.Expression("[bool_field] != true").evaluate(feat) + assert not mapnik.Expression("[bool_field] != false").evaluate(feat) + + feat = next(fs) + assert feat['gid'] == 3 + assert feat['bool_field'] == None # null + assert not mapnik.Expression("[bool_field] = 'name'").evaluate(feat) + assert 
not mapnik.Expression("[bool_field] = ''").evaluate(feat) + assert mapnik.Expression("[bool_field] = null").evaluate(feat) + assert not mapnik.Expression("[bool_field] = true").evaluate(feat) + assert not mapnik.Expression("[bool_field] = false").evaluate(feat) + assert mapnik.Expression("[bool_field] != 'name'").evaluate(feat) # in 2.1.x used to be False # https://github.com/mapnik/mapnik/issues/1859 - eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] != null").evaluate(feat),False) - eq_(mapnik.Expression("[bool_field] != true").evaluate(feat),True) # in 2.1.x used to be False - eq_(mapnik.Expression("[bool_field] != false").evaluate(feat),True) # in 2.1.x used to be False + assert not mapnik.Expression("[bool_field] != ''").evaluate(feat) + assert not mapnik.Expression("[bool_field] != null").evaluate(feat) + assert mapnik.Expression("[bool_field] != true").evaluate(feat) # in 2.1.x used to be False + assert mapnik.Expression("[bool_field] != false").evaluate(feat) # in 2.1.x used to be False # https://github.com/mapnik/mapnik/issues/1816 def test_exception_message_reporting(): try: - mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='doesnotexist') - except Exception, e: - eq_(e.message != 'unidentifiable C++ exception', True) + mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='doesnotexist') + except Exception as e: + assert str(e) != 'unidentifiable C++ exception' def test_null_id_field(): - opts = {'type':'postgis', - 'dbname':MAPNIK_TEST_DBNAME, - 'geometry_field':'geom', - 'table':"(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"} + opts = {'type': 'postgis', + 'dbname': MAPNIK_TEST_DBNAME, + 'geometry_field': 'geom', + 'table': "(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"} ds = mapnik.Datasource(**opts) - fs = ds.featureset() - feat = fs.next() - eq_(feat.id(),1L) - eq_(feat['osm_id'],None) + fs = iter(ds) + feat = next(fs) + assert feat.id() == int(1) + assert feat['osm_id'] == None meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),None) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['srid'] == 4326 + assert meta.get('key_field') == None + assert meta['geometry_type'] == mapnik.DataGeometryType.Point + - @raises(StopIteration) def test_null_key_field(): - opts = {'type':'postgis', + opts = {'type': 'postgis', "key_field": 'osm_id', - 'dbname':MAPNIK_TEST_DBNAME, - 'geometry_field':'geom', - 'table':"(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"} + 'dbname': MAPNIK_TEST_DBNAME, + 'geometry_field': 'geom', + 'table': "(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"} ds = mapnik.Datasource(**opts) - fs = ds.featureset() - fs.next() ## should throw since key_field is null: StopIteration: No more features. + fs = iter(ds) + with pytest.raises(StopIteration): + # should throw since key_field is null: StopIteration: No more + # features. 
+ next(fs) def test_psql_error_should_not_break_connection_pool(): # Bad request, will trigger an error when returning result - ds_bad = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table="""(SELECT geom as geom,label::int from test11) as failure_table""", - max_async_connection=5,geometry_field='geom',srid=4326) + ds_bad = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table="""(SELECT geom as geom,label::int from test11) as failure_table""", + max_async_connection=5, geometry_field='geom', srid=4326) # Good request - ds_good = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table="test", - max_async_connection=5,geometry_field='geom',srid=4326) + ds_good = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table="test", + max_async_connection=5, geometry_field='geom', srid=4326) # This will/should trigger a PSQL error failed = False try: - fs = ds_bad.featureset() - for feature in fs: - pass - except RuntimeError, e: - assert 'invalid input syntax for integer' in str(e) + fs = iter(ds_bad) + count = sum(1 for f in fs) + except RuntimeError as e: + assert 'invalid input syntax for type integer' in str(e) failed = True - eq_(failed,True) + assert failed == True # Should be ok - fs = ds_good.featureset() - count = 0 - for feature in fs: - count += 1 - eq_(count,8) - + fs = iter(ds_good) + count = sum(1 for f in fs) + assert count == 8 def test_psql_error_should_give_back_connections_opened_for_lower_layers_to_the_pool(): - map1 = mapnik.Map(600,300) + map1 = mapnik.Map(600, 300) s = mapnik.Style() r = mapnik.Rule() - r.symbols.append(mapnik.PolygonSymbolizer()) + r.symbolizers.append(mapnik.PolygonSymbolizer()) s.rules.append(r) - map1.append_style('style',s) + map1.append_style('style', s) # This layer will fail after a while buggy_s = mapnik.Style() buggy_r = mapnik.Rule() - buggy_r.symbols.append(mapnik.PolygonSymbolizer()) - buggy_r.filter = mapnik.Filter("[fips] = 'FR'") + buggy_r.symbolizers.append(mapnik.PolygonSymbolizer()) + buggy_r.filter = mapnik.Expression("[fips] = 'FR'") buggy_s.rules.append(buggy_r) - map1.append_style('style for buggy layer',buggy_s) + map1.append_style('style for buggy layer', buggy_s) buggy_layer = mapnik.Layer('this layer is buggy at runtime') # We ensure the query wille be long enough - buggy_layer.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='(SELECT geom as geom, pg_sleep(0.1), fips::int from world_merc) as failure_tabl', - max_async_connection=2, max_size=2,asynchronous_request = True, geometry_field='geom') + buggy_layer.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='(SELECT geom as geom, pg_sleep(0.1), fips::int from world_merc) as failure_tabl', + max_async_connection=2, max_size=2, asynchronous_request=True, geometry_field='geom') buggy_layer.styles.append('style for buggy layer') - # The query for this layer will be sent, then the previous layer will raise an exception before results are read - forced_canceled_layer = mapnik.Layer('this layer will be canceled when an exception stops map rendering') - forced_canceled_layer.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='world_merc', - max_async_connection=2, max_size=2, asynchronous_request = True, geometry_field='geom') + # The query for this layer will be sent, then the previous layer will + # raise an exception before results are read + forced_canceled_layer = mapnik.Layer( + 'this layer will be canceled when an exception stops map rendering') + forced_canceled_layer.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='world_merc', + max_async_connection=2, 
max_size=2, asynchronous_request=True, geometry_field='geom') forced_canceled_layer.styles.append('style') map1.layers.append(buggy_layer) map1.layers.append(forced_canceled_layer) map1.zoom_all() - map2 = mapnik.Map(600,300) + map2 = mapnik.Map(600, 300) map2.background = mapnik.Color('steelblue') s = mapnik.Style() r = mapnik.Rule() - r.symbols.append(mapnik.LineSymbolizer()) - r.symbols.append(mapnik.LineSymbolizer()) + r.symbolizers.append(mapnik.LineSymbolizer()) + r.symbolizers.append(mapnik.LineSymbolizer()) s.rules.append(r) - map2.append_style('style',s) + map2.append_style('style', s) layer1 = mapnik.Layer('layer1') - layer1.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='world_merc', - max_async_connection=2, max_size=2, asynchronous_request = True, geometry_field='geom') + layer1.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='world_merc', + max_async_connection=2, max_size=2, asynchronous_request=True, geometry_field='geom') layer1.styles.append('style') map2.layers.append(layer1) map2.zoom_all() # We expect this to trigger a PSQL error try: - mapnik.render_to_file(map1,'/tmp/mapnik-postgis-test-map1.png', 'png') + mapnik.render_to_file( + map1, '/tmp/mapnik-postgis-test-map1.png', 'png') # Test must fail if error was not raised just above - eq_(False,True) - except RuntimeError, e: - assert 'invalid input syntax for integer' in str(e) + assert False == True + except RuntimeError as e: + assert 'invalid input syntax for type integer' in str(e) pass # This used to raise an exception before correction of issue 2042 - mapnik.render_to_file(map2,'/tmp/mapnik-postgis-test-map2.png', 'png') + mapnik.render_to_file(map2, '/tmp/mapnik-postgis-test-map2.png', 'png') def test_handling_of_zm_dimensions(): ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='(select gid,ST_CoordDim(geom) as dim,name,geom from test12) as tmp', geometry_field='geom') - eq_(len(ds.fields()),3) - eq_(ds.fields(),['gid', 'dim', 'name']) - eq_(ds.field_types(),['int', 'int', 'str']) - fs = ds.featureset() + assert len(ds.fields()) == 3 + assert ds.fields(), ['gid', 'dim' == 'name'] + assert ds.field_types(), ['int', 'int' == 'str'] + fs = iter(ds) meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),None) + assert meta['srid'] == 4326 + assert meta.get('key_field') == None # Note: this is incorrect because we only check first couple geoms - eq_(meta['geometry_type'],mapnik.DataGeometryType.Point) + assert meta['geometry_type'] == mapnik.DataGeometryType.Point # Point (2d) - feat = fs.next() - eq_(feat.id(),1) - eq_(feat['gid'],1) - eq_(feat['dim'],2) - eq_(feat['name'],'Point') - eq_(feat.geometry.to_wkt(),'POINT(0 0)') + feat = next(fs) + assert feat.id() == 1 + assert feat['gid'] == 1 + assert feat['dim'] == 2 + assert feat['name'] == 'Point' + assert feat.geometry.to_wkt() == 'POINT(0 0)' # PointZ - feat = fs.next() - eq_(feat.id(),2) - eq_(feat['gid'],2) - eq_(feat['dim'],3) - eq_(feat['name'],'PointZ') - eq_(feat.geometry.to_wkt(),'POINT(0 0)') + feat = next(fs) + assert feat.id() == 2 + assert feat['gid'] == 2 + assert feat['dim'] == 3 + assert feat['name'] == 'PointZ' + assert feat.geometry.to_wkt() == 'POINT(0 0)' # PointM - feat = fs.next() - eq_(feat.id(),3) - eq_(feat['gid'],3) - eq_(feat['dim'],3) - eq_(feat['name'],'PointM') - eq_(feat.geometry.to_wkt(),'POINT(0 0)') + feat = next(fs) + assert feat.id() == 3 + assert feat['gid'] == 3 + assert feat['dim'] == 3 + assert feat['name'] == 'PointM' + assert feat.geometry.to_wkt() == 'POINT(0 0)' # PointZM 
- feat = fs.next() - eq_(feat.id(),4) - eq_(feat['gid'],4) - eq_(feat['dim'],4) - eq_(feat['name'],'PointZM') + feat = next(fs) + assert feat.id() == 4 + assert feat['gid'] == 4 + assert feat['dim'] == 4 + assert feat['name'] == 'PointZM' - eq_(feat.geometry.to_wkt(),'POINT(0 0)') + assert feat.geometry.to_wkt() == 'POINT(0 0)' # MultiPoint - feat = fs.next() - eq_(feat.id(),5) - eq_(feat['gid'],5) - eq_(feat['dim'],2) - eq_(feat['name'],'MultiPoint') - eq_(feat.geometry.to_wkt(),'MULTIPOINT(0 0,1 1)') + feat = next(fs) + assert feat.id() == 5 + assert feat['gid'] == 5 + assert feat['dim'] == 2 + assert feat['name'] == 'MultiPoint' + assert feat.geometry.to_wkt() == 'MULTIPOINT(0 0,1 1)' # MultiPointZ - feat = fs.next() - eq_(feat.id(),6) - eq_(feat['gid'],6) - eq_(feat['dim'],3) - eq_(feat['name'],'MultiPointZ') - eq_(feat.geometry.to_wkt(),'MULTIPOINT(0 0,1 1)') + feat = next(fs) + assert feat.id() == 6 + assert feat['gid'] == 6 + assert feat['dim'] == 3 + assert feat['name'] == 'MultiPointZ' + assert feat.geometry.to_wkt() == 'MULTIPOINT(0 0,1 1)' # MultiPointM - feat = fs.next() - eq_(feat.id(),7) - eq_(feat['gid'],7) - eq_(feat['dim'],3) - eq_(feat['name'],'MultiPointM') - eq_(feat.geometry.to_wkt(),'MULTIPOINT(0 0,1 1)') + feat = next(fs) + assert feat.id() == 7 + assert feat['gid'] == 7 + assert feat['dim'] == 3 + assert feat['name'] == 'MultiPointM' + assert feat.geometry.to_wkt() == 'MULTIPOINT(0 0,1 1)' # MultiPointZM - feat = fs.next() - eq_(feat.id(),8) - eq_(feat['gid'],8) - eq_(feat['dim'],4) - eq_(feat['name'],'MultiPointZM') - eq_(feat.geometry.to_wkt(),'MULTIPOINT(0 0,1 1)') + feat = next(fs) + assert feat.id() == 8 + assert feat['gid'] == 8 + assert feat['dim'] == 4 + assert feat['name'] == 'MultiPointZM' + assert feat.geometry.to_wkt() == 'MULTIPOINT(0 0,1 1)' # LineString - feat = fs.next() - eq_(feat.id(),9) - eq_(feat['gid'],9) - eq_(feat['dim'],2) - eq_(feat['name'],'LineString') - eq_(feat.geometry.to_wkt(),'LINESTRING(0 0,1 1)') + feat = next(fs) + assert feat.id() == 9 + assert feat['gid'] == 9 + assert feat['dim'] == 2 + assert feat['name'] == 'LineString' + assert feat.geometry.to_wkt() == 'LINESTRING(0 0,1 1)' # LineStringZ - feat = fs.next() - eq_(feat.id(),10) - eq_(feat['gid'],10) - eq_(feat['dim'],3) - eq_(feat['name'],'LineStringZ') - eq_(feat.geometry.to_wkt(),'LINESTRING(0 0,1 1)') + feat = next(fs) + assert feat.id() == 10 + assert feat['gid'] == 10 + assert feat['dim'] == 3 + assert feat['name'] == 'LineStringZ' + assert feat.geometry.to_wkt() == 'LINESTRING(0 0,1 1)' # LineStringM - feat = fs.next() - eq_(feat.id(),11) - eq_(feat['gid'],11) - eq_(feat['dim'],3) - eq_(feat['name'],'LineStringM') - eq_(feat.geometry.to_wkt(),'LINESTRING(0 0,1 1)') + feat = next(fs) + assert feat.id() == 11 + assert feat['gid'] == 11 + assert feat['dim'] == 3 + assert feat['name'] == 'LineStringM' + assert feat.geometry.to_wkt() == 'LINESTRING(0 0,1 1)' # LineStringZM - feat = fs.next() - eq_(feat.id(),12) - eq_(feat['gid'],12) - eq_(feat['dim'],4) - eq_(feat['name'],'LineStringZM') - eq_(feat.geometry.to_wkt(),'LINESTRING(0 0,1 1)') + feat = next(fs) + assert feat.id() == 12 + assert feat['gid'] == 12 + assert feat['dim'] == 4 + assert feat['name'] == 'LineStringZM' + assert feat.geometry.to_wkt() == 'LINESTRING(0 0,1 1)' # Polygon - feat = fs.next() - eq_(feat.id(),13) - eq_(feat['gid'],13) - eq_(feat['name'],'Polygon') - eq_(feat.geometry.to_wkt(),'POLYGON((0 0,1 1,2 2,0 0))') + feat = next(fs) + assert feat.id() == 13 + assert feat['gid'] == 13 + assert feat['name'] 
== 'Polygon' + assert feat.geometry.to_wkt() == 'POLYGON((0 0,1 1,2 2,0 0))' # PolygonZ - feat = fs.next() - eq_(feat.id(),14) - eq_(feat['gid'],14) - eq_(feat['name'],'PolygonZ') - eq_(feat.geometry.to_wkt(),'POLYGON((0 0,1 1,2 2,0 0))') + feat = next(fs) + assert feat.id() == 14 + assert feat['gid'] == 14 + assert feat['name'] == 'PolygonZ' + assert feat.geometry.to_wkt() == 'POLYGON((0 0,1 1,2 2,0 0))' # PolygonM - feat = fs.next() - eq_(feat.id(),15) - eq_(feat['gid'],15) - eq_(feat['name'],'PolygonM') - eq_(feat.geometry.to_wkt(),'POLYGON((0 0,1 1,2 2,0 0))') + feat = next(fs) + assert feat.id() == 15 + assert feat['gid'] == 15 + assert feat['name'] == 'PolygonM' + assert feat.geometry.to_wkt() == 'POLYGON((0 0,1 1,2 2,0 0))' # PolygonZM - feat = fs.next() - eq_(feat.id(),16) - eq_(feat['gid'],16) - eq_(feat['name'],'PolygonZM') - eq_(feat.geometry.to_wkt(),'POLYGON((0 0,1 1,2 2,0 0))') + feat = next(fs) + assert feat.id() == 16 + assert feat['gid'] == 16 + assert feat['name'] == 'PolygonZM' + assert feat.geometry.to_wkt() == 'POLYGON((0 0,1 1,2 2,0 0))' # MultiLineString - feat = fs.next() - eq_(feat.id(),17) - eq_(feat['gid'],17) - eq_(feat['name'],'MultiLineString') - eq_(feat.geometry.to_wkt(),'MULTILINESTRING((0 0,1 1),(2 2,3 3))') + feat = next(fs) + assert feat.id() == 17 + assert feat['gid'] == 17 + assert feat['name'] == 'MultiLineString' + assert feat.geometry.to_wkt() == 'MULTILINESTRING((0 0,1 1),(2 2,3 3))' # MultiLineStringZ - feat = fs.next() - eq_(feat.id(),18) - eq_(feat['gid'],18) - eq_(feat['name'],'MultiLineStringZ') - eq_(feat.geometry.to_wkt(),'MULTILINESTRING((0 0,1 1),(2 2,3 3))') + feat = next(fs) + assert feat.id() == 18 + assert feat['gid'] == 18 + assert feat['name'] == 'MultiLineStringZ' + assert feat.geometry.to_wkt() == 'MULTILINESTRING((0 0,1 1),(2 2,3 3))' # MultiLineStringM - feat = fs.next() - eq_(feat.id(),19) - eq_(feat['gid'],19) - eq_(feat['name'],'MultiLineStringM') - eq_(feat.geometry.to_wkt(),'MULTILINESTRING((0 0,1 1),(2 2,3 3))') + feat = next(fs) + assert feat.id() == 19 + assert feat['gid'] == 19 + assert feat['name'] == 'MultiLineStringM' + assert feat.geometry.to_wkt() == 'MULTILINESTRING((0 0,1 1),(2 2,3 3))' # MultiLineStringZM - feat = fs.next() - eq_(feat.id(),20) - eq_(feat['gid'],20) - eq_(feat['name'],'MultiLineStringZM') - eq_(feat.geometry.to_wkt(),'MULTILINESTRING((0 0,1 1),(2 2,3 3))') + feat = next(fs) + assert feat.id() == 20 + assert feat['gid'] == 20 + assert feat['name'] == 'MultiLineStringZM' + assert feat.geometry.to_wkt() == 'MULTILINESTRING((0 0,1 1),(2 2,3 3))' # MultiPolygon - feat = fs.next() - eq_(feat.id(),21) - eq_(feat['gid'],21) - eq_(feat['name'],'MultiPolygon') - eq_(feat.geometry.to_wkt(),'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))') + feat = next(fs) + assert feat.id() == 21 + assert feat['gid'] == 21 + assert feat['name'] == 'MultiPolygon' + assert feat.geometry.to_wkt() == 'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))' # MultiPolygonZ - feat = fs.next() - eq_(feat.id(),22) - eq_(feat['gid'],22) - eq_(feat['name'],'MultiPolygonZ') - eq_(feat.geometry.to_wkt(),'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))') + feat = next(fs) + assert feat.id() == 22 + assert feat['gid'] == 22 + assert feat['name'] == 'MultiPolygonZ' + assert feat.geometry.to_wkt() == 'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))' # MultiPolygonM - feat = fs.next() - eq_(feat.id(),23) - eq_(feat['gid'],23) - eq_(feat['name'],'MultiPolygonM') - eq_(feat.geometry.to_wkt(),'MULTIPOLYGON(((0 0,1 1,2 2,0 
0)),((0 0,1 1,2 2,0 0)))') + feat = next(fs) + assert feat.id() == 23 + assert feat['gid'] == 23 + assert feat['name'] == 'MultiPolygonM' + assert feat.geometry.to_wkt() == 'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))' # MultiPolygonZM - feat = fs.next() - eq_(feat.id(),24) - eq_(feat['gid'],24) - eq_(feat['name'],'MultiPolygonZM') - eq_(feat.geometry.to_wkt(),'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))') + feat = next(fs) + assert feat.id() == 24 + assert feat['gid'] == 24 + assert feat['name'] == 'MultiPolygonZM' + assert feat.geometry.to_wkt() == 'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))' + + def test_handling_of_discarded_key_field(): + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, + table='(select * from test12) as tmp', + key_field='gid', + key_field_as_attribute=False) + fs = iter(ds) + feat = next(fs) + assert feat['name'] == 'Point' def test_variable_in_subquery1(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table=''' - (select * from test where @zoom = 30 ) as tmp''', + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table=''' + (select * from test where !@zoom! = 30 ) as tmp''', geometry_field='geom', srid=4326, autodetect_key_field=True) - fs = ds.featureset(variables={'zoom':30}) - for id in range(1,5): - eq_(fs.next().id(),id) + q = mapnik.Query(ds.envelope()) + q.variables = {'zoom': 30} + fs = ds.features(q) + for id in range(1, 5): + assert next(fs).id() == id meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta.get('key_field'),"gid") - eq_(meta['geometry_type'],None) + assert meta['srid'] == 4326 + assert meta.get('key_field') == "gid" + assert meta['geometry_type'] == None # currently needs manual `geometry_table` passed # to avoid misparse of `geometry_table` @@ -1139,17 +1250,17 @@ def test_variable_in_subquery1(): # https://github.com/mapnik/mapnik/issues/2718 # currently `bogus` would be picked automatically for geometry_table def test_broken_parsing_of_comments(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table=''' + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table=''' (select * FROM test) AS data -- select this from bogus''', geometry_table='test') - fs = ds.featureset() - for id in range(1,5): - eq_(fs.next().id(),id) + fs = iter(ds) + for id in range(1, 5): + assert next(fs).id() == id meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection) + assert meta['srid'] == 4326 + assert meta['geometry_type'] == mapnik.DataGeometryType.Collection # same # to avoid misparse of `geometry_table` @@ -1157,21 +1268,16 @@ def test_broken_parsing_of_comments(): # https://github.com/mapnik/mapnik/issues/2718 # currently nothing would be picked automatically for geometry_table def test_broken_parsing_of_comments(): - ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table=''' + ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table=''' (select * FROM test) AS data -- select this from bogus.''', geometry_table='test') - fs = ds.featureset() - for id in range(1,5): - eq_(fs.next().id(),id) + fs = iter(ds) + for id in range(1, 5): + assert next(fs).id() == id meta = ds.describe() - eq_(meta['srid'],4326) - eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection) - + assert meta['srid'] == 4326 + assert meta['geometry_type'] == mapnik.DataGeometryType.Collection atexit.register(postgis_takedown) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) diff --git a/test/python_tests/projection_test.py 
b/test/python_tests/projection_test.py index a7bdc1455..7fe312a98 100644 --- a/test/python_tests/projection_test.py +++ b/test/python_tests/projection_test.py @@ -1,116 +1,109 @@ -#!/usr/bin/env python - -from nose.tools import eq_,assert_almost_equal - -import mapnik import math -from utilities import run_all, assert_box2d_almost_equal +import sys +import mapnik +import pytest -# Tests that exercise map projections. +from .utilities import assert_box2d_almost_equal -def test_normalizing_definition(): - p = mapnik.Projection('+init=epsg:4326') - expanded = p.expanded() - eq_('+proj=longlat' in expanded,True) +# Tests that exercise map projections. +def test_projection_description(): + p = mapnik.Projection('epsg:4326') + assert 'WGS 84' == p.description() # Trac Ticket #128 def test_wgs84_inverse_forward(): - p = mapnik.Projection('+init=epsg:4326') - + p1 = mapnik.Projection('epsg:4326') + p2 = mapnik.Projection('epsg:4326') + tr = mapnik.ProjTransform(p1, p2) c = mapnik.Coord(3.01331418311, 43.3333092669) e = mapnik.Box2d(-122.54345245, 45.12312553, 68.2335581353, 48.231231233) # It appears that the y component changes very slightly, is this OK? # so we test for 'almost equal float values' - assert_almost_equal(p.inverse(c).y, c.y) - assert_almost_equal(p.inverse(c).x, c.x) - - assert_almost_equal(p.forward(c).y, c.y) - assert_almost_equal(p.forward(c).x, c.x) - - assert_almost_equal(p.inverse(e).center().y, e.center().y) - assert_almost_equal(p.inverse(e).center().x, e.center().x) - - assert_almost_equal(p.forward(e).center().y, e.center().y) - assert_almost_equal(p.forward(e).center().x, e.center().x) - - assert_almost_equal(c.inverse(p).y, c.y) - assert_almost_equal(c.inverse(p).x, c.x) - - assert_almost_equal(c.forward(p).y, c.y) - assert_almost_equal(c.forward(p).x, c.x) - - assert_almost_equal(e.inverse(p).center().y, e.center().y) - assert_almost_equal(e.inverse(p).center().x, e.center().x) - - assert_almost_equal(e.forward(p).center().y, e.center().y) - assert_almost_equal(e.forward(p).center().x, e.center().x) - -def wgs2merc(lon,lat): - x = lon * 20037508.34 / 180; - y = math.log(math.tan((90 + lat) * math.pi / 360)) / (math.pi / 180); - y = y * 20037508.34 / 180; - return [x,y]; - -def merc2wgs(x,y): - x = (x / 20037508.34) * 180; - y = (y / 20037508.34) * 180; - y = 180 / math.pi * (2 * math.atan(math.exp(y * math.pi/180)) - math.pi/2); - if x > 180: x = 180; - if x < -180: x = -180; - if y > 85.0511: y = 85.0511; - if y < -85.0511: y = -85.0511; - return [x,y] - -#echo -109 37 | cs2cs -f "%.10f" +init=epsg:4326 +to +init=epsg:3857 + assert tr.backward(c).y == pytest.approx(c.y) + assert tr.backward(c).x == pytest.approx(c.x) + + assert tr.forward(c).y == pytest.approx(c.y) + assert tr.forward(c).x == pytest.approx(c.x) + + assert tr.backward(e).center().y == pytest.approx(e.center().y) + assert tr.backward(e).center().x == pytest.approx(e.center().x) + + assert tr.forward(e).center().y == pytest.approx(e.center().y) + assert tr.forward(e).center().x == pytest.approx(e.center().x) + +def wgs2merc(lon, lat): + x = lon * 20037508.34 / 180 + y = math.log(math.tan((90 + lat) * math.pi / 360)) / (math.pi / 180) + y = y * 20037508.34 / 180 + return [x, y] + + +def merc2wgs(x, y): + x = (x / 20037508.34) * 180 + y = (y / 20037508.34) * 180 + y = 180 / math.pi * \ + (2 * math.atan(math.exp(y * math.pi / 180)) - math.pi / 2) + if x > 180: + x = 180 + if x < -180: + x = -180 + if y > 85.0511: + y = 85.0511 + if y < -85.0511: + y = -85.0511 + return [x, y] + +# echo -109 37 | cs2cs -f 
"%.10f" epsg:4326 +to epsg:3857 #-12133824.4964668211 4439106.7872505859 0.0000000000 -## todo +# todo # benchmarks # better well known detection # better srs matching with strip/trim # python copy to avoid crash + def test_proj_transform_between_init_and_literal(): - one = mapnik.Projection('+init=epsg:4326') - two = mapnik.Projection('+init=epsg:3857') - tr1 = mapnik.ProjTransform(one,two) - tr1b = mapnik.ProjTransform(two,one) - wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' - merc = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over' + one = mapnik.Projection('epsg:4326') + two = mapnik.Projection('epsg:3857') + tr1 = mapnik.ProjTransform(one, two) + tr1b = mapnik.ProjTransform(two, one) + wgs84 = 'epsg:4326' + merc = 'epsg:3857' src = mapnik.Projection(wgs84) dest = mapnik.Projection(merc) - tr2 = mapnik.ProjTransform(src,dest) - tr2b = mapnik.ProjTransform(dest,src) - for x in xrange(-180,180,10): - for y in xrange(-60,60,10): - coord = mapnik.Coord(x,y) + tr2 = mapnik.ProjTransform(src, dest) + tr2b = mapnik.ProjTransform(dest, src) + for x in range(-180, 180, 10): + for y in range(-60, 60, 10): + coord = mapnik.Coord(x, y) merc_coord1 = tr1.forward(coord) merc_coord2 = tr1b.backward(coord) merc_coord3 = tr2.forward(coord) merc_coord4 = tr2b.backward(coord) - eq_(math.fabs(merc_coord1.x - merc_coord1.x) < 1,True) - eq_(math.fabs(merc_coord1.x - merc_coord2.x) < 1,True) - eq_(math.fabs(merc_coord1.x - merc_coord3.x) < 1,True) - eq_(math.fabs(merc_coord1.x - merc_coord4.x) < 1,True) - eq_(math.fabs(merc_coord1.y - merc_coord1.y) < 1,True) - eq_(math.fabs(merc_coord1.y - merc_coord2.y) < 1,True) - eq_(math.fabs(merc_coord1.y - merc_coord3.y) < 1,True) - eq_(math.fabs(merc_coord1.y - merc_coord4.y) < 1,True) + assert math.fabs(merc_coord1.x - merc_coord1.x) < 1 + assert math.fabs(merc_coord1.x - merc_coord2.x) < 1 + assert math.fabs(merc_coord1.x - merc_coord3.x) < 1 + assert math.fabs(merc_coord1.x - merc_coord4.x) < 1 + assert math.fabs(merc_coord1.y - merc_coord1.y) < 1 + assert math.fabs(merc_coord1.y - merc_coord2.y) < 1 + assert math.fabs(merc_coord1.y - merc_coord3.y) < 1 + assert math.fabs(merc_coord1.y - merc_coord4.y) < 1 lon_lat_coord1 = tr1.backward(merc_coord1) lon_lat_coord2 = tr1b.forward(merc_coord2) lon_lat_coord3 = tr2.backward(merc_coord3) lon_lat_coord4 = tr2b.forward(merc_coord4) - eq_(math.fabs(coord.x - lon_lat_coord1.x) < 1,True) - eq_(math.fabs(coord.x - lon_lat_coord2.x) < 1,True) - eq_(math.fabs(coord.x - lon_lat_coord3.x) < 1,True) - eq_(math.fabs(coord.x - lon_lat_coord4.x) < 1,True) - eq_(math.fabs(coord.y - lon_lat_coord1.y) < 1,True) - eq_(math.fabs(coord.y - lon_lat_coord2.y) < 1,True) - eq_(math.fabs(coord.y - lon_lat_coord3.y) < 1,True) - eq_(math.fabs(coord.y - lon_lat_coord4.y) < 1,True) + assert math.fabs(coord.x - lon_lat_coord1.x) < 1 + assert math.fabs(coord.x - lon_lat_coord2.x) < 1 + assert math.fabs(coord.x - lon_lat_coord3.x) < 1 + assert math.fabs(coord.x - lon_lat_coord4.x) < 1 + assert math.fabs(coord.y - lon_lat_coord1.y) < 1 + assert math.fabs(coord.y - lon_lat_coord2.y) < 1 + assert math.fabs(coord.y - lon_lat_coord3.y) < 1 + assert math.fabs(coord.y - lon_lat_coord4.y) < 1 # Github Issue #2648 @@ -118,13 +111,14 @@ def test_proj_antimeridian_bbox(): # this is logic from feature_style_processor::prepare_layer() PROJ_ENVELOPE_POINTS = 20 # include/mapnik/config.hpp - prjGeog = mapnik.Projection('+init=epsg:4326') - prjProj = 
mapnik.Projection('+init=epsg:2193') + prjGeog = mapnik.Projection('epsg:4326') + prjProj = mapnik.Projection('epsg:2193') prj_trans_fwd = mapnik.ProjTransform(prjProj, prjGeog) prj_trans_rev = mapnik.ProjTransform(prjGeog, prjProj) # bad = mapnik.Box2d(-177.31453250437079, -62.33374815225163, 178.02778363316355, -24.584597490955804) - better = mapnik.Box2d(-180.0, -62.33374815225163, 180.0, -24.584597490955804) + better = mapnik.Box2d(-180.0, -62.33374815225163, + 180.0, -24.584597490955804) buffered_query_ext = mapnik.Box2d(274000, 3087000, 3327000, 7173000) fwd_ext = prj_trans_fwd.forward(buffered_query_ext, PROJ_ENVELOPE_POINTS) @@ -136,7 +130,8 @@ def test_proj_antimeridian_bbox(): assert_box2d_almost_equal(rev_ext, better) # checks for not being snapped (ie. not antimeridian) - normal = mapnik.Box2d(148.766759749,-60.1222810238,159.95484893,-24.9771195151) + normal = mapnik.Box2d(148.766759749, -60.1222810238, + 159.95484893, -24.9771195151) buffered_query_ext = mapnik.Box2d(274000, 3087000, 276000, 7173000) fwd_ext = prj_trans_fwd.forward(buffered_query_ext, PROJ_ENVELOPE_POINTS) assert_box2d_almost_equal(fwd_ext, normal) @@ -145,7 +140,3 @@ def test_proj_antimeridian_bbox(): ext = mapnik.Box2d(274000, 3087000, 276000, 7173000) rev_ext = prj_trans_rev.backward(ext, PROJ_ENVELOPE_POINTS) assert_box2d_almost_equal(rev_ext, normal) - - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) diff --git a/test/python_tests/python_plugin_test.py b/test/python_tests/python_plugin_test.py index a39272f6b..abeca7d0c 100644 --- a/test/python_tests/python_plugin_test.py +++ b/test/python_tests/python_plugin_test.py @@ -62,7 +62,7 @@ # def within_circle(p): # delta_x = p[0] - self.container.centre[0] # delta_y = p[0] - self.container.centre[0] -# return delta_x*delta_x + delta_y*delta_y < self.radius*self.radius +# return delta_x*delta_x + delta_y*delta_y < self.radius*self.radius # if all(within_circle(p) for p in (tl,tr,bl,br)): # raise StopIteration() diff --git a/test/python_tests/query_test.py b/test/python_tests/query_test.py index 8da353448..8a0d58903 100644 --- a/test/python_tests/query_test.py +++ b/test/python_tests/query_test.py @@ -1,37 +1,33 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import os, mapnik - -from nose.tools import eq_,assert_almost_equal,raises -from utilities import execution_path, run_all +import os +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield -def test_query_init(): +def test_query_init(setup): bbox = (-180, -90, 180, 90) query = mapnik.Query(mapnik.Box2d(*bbox)) r = query.resolution - assert_almost_equal(r[0], 1.0, places=7) - assert_almost_equal(r[1], 1.0, places=7) + assert r[0] == pytest.approx(1.0, abs=1e-7) + assert r[1] == pytest.approx(1.0, abs=1e-7) # https://github.com/mapnik/mapnik/issues/1762 - eq_(query.property_names,[]) + assert query.property_names == [] query.add_property_name('migurski') - eq_(query.property_names,['migurski']) + assert query.property_names == ['migurski'] # Converting *from* tuples *to* resolutions is not yet supported -@raises(TypeError) -def test_query_resolution(): - bbox = (-180, -90, 180, 90) - init_res = (4.5, 6.7) - query = mapnik.Query(mapnik.Box2d(*bbox), init_res) - r = query.resolution - assert_almost_equal(r[0], init_res[0], places=7) - assert_almost_equal(r[1], 
init_res[1], places=7) -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) +def test_query_resolution(): + with pytest.raises(TypeError): + bbox = (-180, -90, 180, 90) + init_res = (4.5, 6.7) + query = mapnik.Query(mapnik.Box2d(*bbox), init_res) + r = query.resolution + assert r[0] == pytest.approx(init_res[0], abs=1e-7) + assert r[1] == pytest.approx(init_res[1], abs=1e-7) diff --git a/test/python_tests/query_tolerance_test.py b/test/python_tests/query_tolerance_test.py index 97c1b3e2c..da2a1cf60 100644 --- a/test/python_tests/query_tolerance_test.py +++ b/test/python_tests/query_tolerance_test.py @@ -1,43 +1,40 @@ -#!/usr/bin/env python - -from nose.tools import eq_ -from utilities import execution_path, run_all -import os, mapnik +import os +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if 'shape' in mapnik.DatasourceCache.plugin_names(): - def test_query_tolerance(): - srs = '+init=epsg:4326' + def test_query_tolerance(setup): + srs = 'epsg:4326' lyr = mapnik.Layer('test') ds = mapnik.Shapefile(file='../data/shp/arrows.shp') lyr.datasource = ds lyr.srs = srs _width = 256 - _map = mapnik.Map(_width,_width, srs) + _map = mapnik.Map(_width, _width, srs) _map.layers.append(lyr) # zoom determines tolerance _map.zoom_all() _map_env = _map.envelope() tol = (_map_env.maxx - _map_env.minx) / _width * 3 # 0.046875 for arrows.shp and zoom_all - eq_(tol,0.046875) + assert tol == 0.046875 # check point really exists x, y = 2.0, 4.0 - features = _map.query_point(0,x,y).features - eq_(len(features),1) + features = _map.query_point(0, x, y) + assert len(list(features)) == 1 # check inside tolerance limit x = 2.0 + tol * 0.9 - features = _map.query_point(0,x,y).features - eq_(len(features),1) + features = _map.query_point(0, x, y) + assert len(list(features)) == 1 # check outside tolerance limit x = 2.0 + tol * 1.1 - features = _map.query_point(0,x,y).features - eq_(len(features),0) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + features = _map.query_point(0, x, y) + assert len(list(features)) == 0 diff --git a/test/python_tests/raster_colorizer_test.py b/test/python_tests/raster_colorizer_test.py index 6fb0102e2..e9995a5fe 100644 --- a/test/python_tests/raster_colorizer_test.py +++ b/test/python_tests/raster_colorizer_test.py @@ -1,90 +1,95 @@ -#coding=utf8 import os +import sys +import pytest import mapnik -from utilities import execution_path, run_all -from nose.tools import eq_ +from .utilities import execution_path + +@pytest.fixture def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield + +# test discrete colorizer mode +def test_get_color_discrete(setup): + # setup + colorizer = mapnik.RasterColorizer() + colorizer.default_color = mapnik.Color(0, 0, 0, 0) + colorizer.default_mode = mapnik.COLORIZER_DISCRETE -#test discrete colorizer mode -def test_get_color_discrete(): - #setup - colorizer = mapnik.RasterColorizer(); - colorizer.default_color = mapnik.Color(0,0,0,0); - colorizer.default_mode = mapnik.COLORIZER_DISCRETE; + colorizer.add_stop(10, mapnik.Color(100, 100, 100, 100)) + colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200)) - colorizer.add_stop(10, mapnik.Color(100,100,100,100)); - 
colorizer.add_stop(20, mapnik.Color(200,200,200,200)); + # should be default colour + assert colorizer.get_color(-50) == mapnik.Color(0, 0, 0, 0) + assert colorizer.get_color(0) == mapnik.Color(0, 0, 0, 0) - #should be default colour - eq_(colorizer.get_color(-50), mapnik.Color(0,0,0,0)); - eq_(colorizer.get_color(0), mapnik.Color(0,0,0,0)); + # now in stop 1 + assert colorizer.get_color(10) == mapnik.Color(100, 100, 100, 100) + assert colorizer.get_color(19) == mapnik.Color(100, 100, 100, 100) - #now in stop 1 - eq_(colorizer.get_color(10), mapnik.Color(100,100,100,100)); - eq_(colorizer.get_color(19), mapnik.Color(100,100,100,100)); + # now in stop 2 + assert colorizer.get_color(20) == mapnik.Color(200, 200, 200, 200) + assert colorizer.get_color(1000) == mapnik.Color(200, 200, 200, 200) + +# test exact colorizer mode - #now in stop 2 - eq_(colorizer.get_color(20), mapnik.Color(200,200,200,200)); - eq_(colorizer.get_color(1000), mapnik.Color(200,200,200,200)); -#test exact colorizer mode def test_get_color_exact(): - #setup - colorizer = mapnik.RasterColorizer(); - colorizer.default_color = mapnik.Color(0,0,0,0); - colorizer.default_mode = mapnik.COLORIZER_EXACT; + # setup + colorizer = mapnik.RasterColorizer() + colorizer.default_color = mapnik.Color(0, 0, 0, 0) + colorizer.default_mode = mapnik.COLORIZER_EXACT + + colorizer.add_stop(10, mapnik.Color(100, 100, 100, 100)) + colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200)) - colorizer.add_stop(10, mapnik.Color(100,100,100,100)); - colorizer.add_stop(20, mapnik.Color(200,200,200,200)); + # should be default colour + assert colorizer.get_color(-50) == mapnik.Color(0, 0, 0, 0) + assert colorizer.get_color(11) == mapnik.Color(0, 0, 0, 0) + assert colorizer.get_color(20.001) == mapnik.Color(0, 0, 0, 0) - #should be default colour - eq_(colorizer.get_color(-50), mapnik.Color(0,0,0,0)); - eq_(colorizer.get_color(11), mapnik.Color(0,0,0,0)); - eq_(colorizer.get_color(20.001), mapnik.Color(0,0,0,0)); + # should be stop 1 + assert colorizer.get_color(10) == mapnik.Color(100, 100, 100, 100) - #should be stop 1 - eq_(colorizer.get_color(10), mapnik.Color(100,100,100,100)); + # should be stop 2 + assert colorizer.get_color(20) == mapnik.Color(200, 200, 200, 200) + +# test linear colorizer mode - #should be stop 2 - eq_(colorizer.get_color(20), mapnik.Color(200,200,200,200)); -#test linear colorizer mode def test_get_color_linear(): - #setup - colorizer = mapnik.RasterColorizer(); - colorizer.default_color = mapnik.Color(0,0,0,0); - colorizer.default_mode = mapnik.COLORIZER_LINEAR; + # setup + colorizer = mapnik.RasterColorizer() + colorizer.default_color = mapnik.Color(0, 0, 0, 0) + colorizer.default_mode = mapnik.COLORIZER_LINEAR + + colorizer.add_stop(10, mapnik.Color(100, 100, 100, 100)) + colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200)) - colorizer.add_stop(10, mapnik.Color(100,100,100,100)); - colorizer.add_stop(20, mapnik.Color(200,200,200,200)); + # should be default colour + assert colorizer.get_color(-50) == mapnik.Color(0, 0, 0, 0) + assert colorizer.get_color(9.9) == mapnik.Color(0, 0, 0, 0) - #should be default colour - eq_(colorizer.get_color(-50), mapnik.Color(0,0,0,0)); - eq_(colorizer.get_color(9.9), mapnik.Color(0,0,0,0)); + # should be stop 1 + assert colorizer.get_color(10) == mapnik.Color(100, 100, 100, 100) - #should be stop 1 - eq_(colorizer.get_color(10), mapnik.Color(100,100,100,100)); + # should be stop 2 + assert colorizer.get_color(20) == mapnik.Color(200, 200, 200, 200) - #should be stop 2 - 
eq_(colorizer.get_color(20), mapnik.Color(200,200,200,200)); + # half way between stops 1 and 2 + assert colorizer.get_color(15) == mapnik.Color(150, 150, 150, 150) - #half way between stops 1 and 2 - eq_(colorizer.get_color(15), mapnik.Color(150,150,150,150)); + # after stop 2 + assert colorizer.get_color(100) == mapnik.Color(200, 200, 200, 200) - #after stop 2 - eq_(colorizer.get_color(100), mapnik.Color(200,200,200,200)); def test_stop_label(): - stop = mapnik.ColorizerStop(1, mapnik.COLORIZER_LINEAR, mapnik.Color('red')) + stop = mapnik.ColorizerStop( + 1, mapnik.COLORIZER_LINEAR, mapnik.Color('red')) assert not stop.label - label = u"32º C".encode('utf8') + label = u"32º C" stop.label = label assert stop.label == label, stop.label - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) diff --git a/test/python_tests/raster_symbolizer_test.py b/test/python_tests/raster_symbolizer_test.py index 9092118dc..04ab6e760 100644 --- a/test/python_tests/raster_symbolizer_test.py +++ b/test/python_tests/raster_symbolizer_test.py @@ -1,101 +1,105 @@ -#!/usr/bin/env python - -from nose.tools import eq_ -from utilities import execution_path, run_all, get_unique_colors - -import os, mapnik +import os +import mapnik +import pytest +from .utilities import execution_path, get_unique_colors +@pytest.fixture def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield - -def test_dataraster_coloring(): - srs = '+init=epsg:32630' +def test_dataraster_coloring(setup): + srs = 'epsg:32630' lyr = mapnik.Layer('dataraster') if 'gdal' in mapnik.DatasourceCache.plugin_names(): lyr.datasource = mapnik.Gdal( - file = '../data/raster/dataraster.tif', - band = 1, - ) + file='../data/raster/dataraster.tif', + band=1, + ) lyr.srs = srs - _map = mapnik.Map(256,256, srs) + _map = mapnik.Map(256, 256, srs) style = mapnik.Style() rule = mapnik.Rule() sym = mapnik.RasterSymbolizer() # Assigning a colorizer to the RasterSymbolizer tells the later # that it should use it to colorize the raw data raster - colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color("transparent")) + colorizer = mapnik.RasterColorizer( + mapnik.COLORIZER_DISCRETE, + mapnik.Color("transparent")) for value, color in [ - ( 0, "#0044cc"), - ( 10, "#00cc00"), - ( 20, "#ffff00"), - ( 30, "#ff7f00"), - ( 40, "#ff0000"), - ( 50, "#ff007f"), - ( 60, "#ff00ff"), - ( 70, "#cc00cc"), - ( 80, "#990099"), - ( 90, "#660066"), - ( 200, "transparent"), + (0, "#0044cc"), + (10, "#00cc00"), + (20, "#ffff00"), + (30, "#ff7f00"), + (40, "#ff0000"), + (50, "#ff007f"), + (60, "#ff00ff"), + (70, "#cc00cc"), + (80, "#990099"), + (90, "#660066"), + (200, "transparent"), ]: colorizer.add_stop(value, mapnik.Color(color)) - sym.colorizer = colorizer; - rule.symbols.append(sym) + sym.colorizer = colorizer + rule.symbolizers.append(sym) style.rules.append(rule) _map.append_style('foo', style) lyr.styles.append('foo') _map.layers.append(lyr) _map.zoom_to_box(lyr.envelope()) - im = mapnik.Image(_map.width,_map.height) + im = mapnik.Image(_map.width, _map.height) mapnik.render(_map, im) expected_file = './images/support/dataraster_coloring.png' actual_file = '/tmp/' + os.path.basename(expected_file) - im.save(actual_file,'png32') + im.save(actual_file, 'png32') if not os.path.exists(expected_file) or os.environ.get('UPDATE'): - im.save(expected_file,'png32') + im.save(expected_file, 'png32') actual = 
mapnik.Image.open(actual_file) expected = mapnik.Image.open(expected_file) - eq_(actual.tostring('png32'),expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,expected_file)) + assert actual.to_string('png32') == expected.to_string('png32'),'failed comparing actual (%s) and expected (%s)' % (actual_file, + expected_file) + def test_dataraster_query_point(): - srs = '+init=epsg:32630' + srs = 'epsg:32630' lyr = mapnik.Layer('dataraster') if 'gdal' in mapnik.DatasourceCache.plugin_names(): lyr.datasource = mapnik.Gdal( - file = '../data/raster/dataraster.tif', - band = 1, - ) + file='../data/raster/dataraster.tif', + band=1, + ) lyr.srs = srs - _map = mapnik.Map(256,256, srs) + _map = mapnik.Map(256, 256, srs) _map.layers.append(lyr) - x, y = 556113.0,4381428.0 # center of extent of raster + x, y = 556113.0, 4381428.0 # center of extent of raster _map.zoom_all() - features = _map.query_point(0,x,y).features + features = list(_map.query_point(0, x, y)) assert len(features) == 1 feat = features[0] center = feat.envelope().center() - assert center.x==x and center.y==y, center + assert center.x == x and center.y == y, center value = feat['value'] assert value == 18.0, value # point inside map extent but outside raster extent current_box = _map.envelope() - current_box.expand_to_include(-427417,4477517) + current_box.expand_to_include(-427417, 4477517) _map.zoom_to_box(current_box) - features = _map.query_point(0,-427417,4477517).features - assert len(features) == 0 + features = _map.query_point(0, -427417, 4477517) + assert len(list(features)) == 0 # point inside raster extent with nodata - features = _map.query_point(0,126850,4596050).features - assert len(features) == 0 + features = _map.query_point(0, 126850, 4596050) + assert len(list(features)) == 0 + def test_load_save_map(): - map = mapnik.Map(256,256) + map = mapnik.Map(256, 256) in_map = "../data/good_maps/raster_symbolizer.xml" try: mapnik.load_map(map, in_map) @@ -104,11 +108,12 @@ def test_load_save_map(): assert 'RasterSymbolizer' in out_map assert 'RasterColorizer' in out_map assert 'stop' in out_map - except RuntimeError, e: + except RuntimeError as e: # only test datasources that we have installed if not 'Could not create datasource' in str(e): raise RuntimeError(str(e)) + def test_raster_with_alpha_blends_correctly_with_background(): WIDTH = 500 HEIGHT = 500 @@ -122,7 +127,7 @@ def test_raster_with_alpha_blends_correctly_with_background(): symbolizer = mapnik.RasterSymbolizer() symbolizer.scaling = mapnik.scaling_method.BILINEAR - rule.symbols.append(symbolizer) + rule.symbolizers.append(symbolizer) style.rules.append(rule) map.append_style('raster_style', style) @@ -139,26 +144,28 @@ def test_raster_with_alpha_blends_correctly_with_background(): mim = mapnik.Image(WIDTH, HEIGHT) mapnik.render(map, mim) - mim.tostring() + mim.to_string() # All white is expected - eq_(get_unique_colors(mim),['rgba(254,254,254,255)']) + assert get_unique_colors(mim) == ['rgba(254,254,254,255)'] + def test_raster_warping(): - lyrSrs = "+init=epsg:32630" - mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' + lyrSrs = "epsg:32630" + mapSrs = 'epsg:4326' lyr = mapnik.Layer('dataraster', lyrSrs) if 'gdal' in mapnik.DatasourceCache.plugin_names(): lyr.datasource = mapnik.Gdal( - file = '../data/raster/dataraster.tif', - band = 1, - ) + file='../data/raster/dataraster.tif', + band=1, + ) sym = mapnik.RasterSymbolizer() - sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, 
mapnik.Color(255,255,0)) + sym.colorizer = mapnik.RasterColorizer( + mapnik.COLORIZER_DISCRETE, mapnik.Color(255, 255, 0)) rule = mapnik.Rule() - rule.symbols.append(sym) + rule.symbolizers.append(sym) style = mapnik.Style() style.rules.append(rule) - _map = mapnik.Map(256,256, mapSrs) + _map = mapnik.Map(256, 256, mapSrs) _map.append_style('foo', style) lyr.styles.append('foo') _map.layers.append(lyr) @@ -168,50 +175,50 @@ def test_raster_warping(): layer_proj) _map.zoom_to_box(prj_trans.backward(lyr.envelope())) - im = mapnik.Image(_map.width,_map.height) + im = mapnik.Image(_map.width, _map.height) mapnik.render(_map, im) expected_file = './images/support/raster_warping.png' actual_file = '/tmp/' + os.path.basename(expected_file) - im.save(actual_file,'png32') + im.save(actual_file, 'png32') if not os.path.exists(expected_file) or os.environ.get('UPDATE'): - im.save(expected_file,'png32') + im.save(expected_file, 'png32') actual = mapnik.Image.open(actual_file) expected = mapnik.Image.open(expected_file) - eq_(actual.tostring('png32'),expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,expected_file)) + assert actual.to_string('png32') == expected.to_string('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file, + expected_file) + def test_raster_warping_does_not_overclip_source(): - lyrSrs = "+init=epsg:32630" - mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs' + lyrSrs = "epsg:32630" + mapSrs = 'epsg:4326' lyr = mapnik.Layer('dataraster', lyrSrs) if 'gdal' in mapnik.DatasourceCache.plugin_names(): lyr.datasource = mapnik.Gdal( - file = '../data/raster/dataraster.tif', - band = 1, - ) + file='../data/raster/dataraster.tif', + band=1, + ) sym = mapnik.RasterSymbolizer() - sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0)) + sym.colorizer = mapnik.RasterColorizer( + mapnik.COLORIZER_DISCRETE, mapnik.Color(255, 255, 0)) rule = mapnik.Rule() - rule.symbols.append(sym) + rule.symbolizers.append(sym) style = mapnik.Style() style.rules.append(rule) - _map = mapnik.Map(256,256, mapSrs) - _map.background=mapnik.Color('white') + _map = mapnik.Map(256, 256, mapSrs) + _map.background = mapnik.Color('white') _map.append_style('foo', style) lyr.styles.append('foo') _map.layers.append(lyr) - _map.zoom_to_box(mapnik.Box2d(3,42,4,43)) + _map.zoom_to_box(mapnik.Box2d(3, 42, 4, 43)) - im = mapnik.Image(_map.width,_map.height) + im = mapnik.Image(_map.width, _map.height) mapnik.render(_map, im) expected_file = './images/support/raster_warping_does_not_overclip_source.png' actual_file = '/tmp/' + os.path.basename(expected_file) - im.save(actual_file,'png32') + im.save(actual_file, 'png32') if not os.path.exists(expected_file) or os.environ.get('UPDATE'): - im.save(expected_file,'png32') + im.save(expected_file, 'png32') actual = mapnik.Image.open(actual_file) expected = mapnik.Image.open(expected_file) - eq_(actual.tostring('png32'),expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,expected_file)) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert actual.to_string('png32') == expected.to_string('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file, + expected_file) diff --git a/test/python_tests/rasterlite_test.py b/test/python_tests/rasterlite_test.py index b15b1574d..015df2e71 100644 --- a/test/python_tests/rasterlite_test.py +++ b/test/python_tests/rasterlite_test.py @@ 
-1,38 +1,34 @@ -#!/usr/bin/env python +import os +import mapnik +import pytest -from nose.tools import eq_,assert_almost_equal -from utilities import execution_path, run_all - -import os, mapnik +from .utilities import execution_path +@pytest.fixture def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) - + yield if 'rasterlite' in mapnik.DatasourceCache.plugin_names(): def test_rasterlite(): ds = mapnik.Rasterlite( - file = '../data/rasterlite/globe.sqlite', - table = 'globe' - ) + file='../data/rasterlite/globe.sqlite', + table='globe' + ) e = ds.envelope() - assert_almost_equal(e.minx,-180, places=5) - assert_almost_equal(e.miny, -90, places=5) - assert_almost_equal(e.maxx, 180, places=5) - assert_almost_equal(e.maxy, 90, places=5) - eq_(len(ds.fields()),0) + assert e.minx == pytest.approx(-180,abs=1e-5) + assert e.miny == pytest.approx(-90, abs=1e-5) + assert e.maxx == pytest.approx(180, abs=1e-5) + assert e.maxy == pytest.approx( 90, abs=1e-5) + assert len(ds.fields()) == 0 query = mapnik.Query(ds.envelope()) for fld in ds.fields(): query.add_property_name(fld) fs = ds.features(query) feat = fs.next() - eq_(feat.id(),1) - eq_(feat.attributes,{}) - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert feat.id() == 1 + assert feat.attributes == {} diff --git a/test/python_tests/render_grid_test.py b/test/python_tests/render_grid_test.py index 85c7401c6..399c0393c 100644 --- a/test/python_tests/render_grid_test.py +++ b/test/python_tests/render_grid_test.py @@ -1,356 +1,986 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- +import os +import mapnik +import json +import pytest -from nose.tools import eq_,raises -from utilities import execution_path, run_all -import os, mapnik - -try: - import json -except ImportError: - import simplejson as json +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if mapnik.has_grid_renderer(): - def show_grids(name,g1,g2): + def show_grids(name, g1, g2): g1_file = '/tmp/mapnik-%s-actual.json' % name - open(g1_file,'w').write(json.dumps(g1,sort_keys=True)) + with open(g1_file, 'w') as f: + f.write(json.dumps(g1, sort_keys=True)) g2_file = '/tmp/mapnik-%s-expected.json' % name - open(g2_file,'w').write(json.dumps(g2,sort_keys=True)) + with open(g2_file, 'w') as f: + f.write(json.dumps(g2, sort_keys=True)) val = 'JSON does not match ->\n' if g1['grid'] != g2['grid']: - val += ' X grid does not match\n' + val += ' X grid does not match\n' else: - val += ' ✓ grid matches\n' + val += ' ✓ grid matches\n' if g1['data'].keys() != g2['data'].keys(): - val += ' X data does not match\n' + val += ' X data does not match\n' else: - val += ' ✓ data matches\n' + val += ' ✓ data matches\n' if g1['keys'] != g2['keys']: - val += ' X keys do not\n' + val += ' X keys do not\n' else: - val += ' ✓ keys match\n' - val += '\n\t%s\n\t%s' % (g1_file,g2_file) + val += ' ✓ keys match\n' + val += '\n\t%s\n\t%s' % (g1_file, g2_file) return val - def show_grids2(name,g1,g2): + def show_grids2(name, g1, g2): g2_expected = '../data/grids/mapnik-%s-actual.json' % name if not os.path.exists(g2_expected): # create test fixture based on actual results - open(g2_expected,'a+').write(json.dumps(g1,sort_keys=True)) + with open(g2_expected, 'a+') as f: + f.write(json.dumps(g1, 
sort_keys=True)) return g1_file = '/tmp/mapnik-%s-actual.json' % name - open(g1_file,'w').write(json.dumps(g1,sort_keys=True)) + with open(g1_file, 'w') as f: + f.write(json.dumps(g1, sort_keys=True)) val = 'JSON does not match ->\n' if g1['grid'] != g2['grid']: - val += ' X grid does not match\n' + val += ' X grid does not match\n' else: - val += ' ✓ grid matches\n' + val += ' ✓ grid matches\n' if g1['data'].keys() != g2['data'].keys(): - val += ' X data does not match\n' + val += ' X data does not match\n' else: - val += ' ✓ data matches\n' + val += ' ✓ data matches\n' if g1['keys'] != g2['keys']: - val += ' X keys do not\n' + val += ' X keys do not\n' else: - val += ' ✓ keys match\n' - val += '\n\t%s\n\t%s' % (g1_file,g2_expected) + val += ' ✓ keys match\n' + val += '\n\t%s\n\t%s' % (g1_file, g2_expected) return val - + # previous rendering using agg ellipse directly - grid_correct_new = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $$ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "North West", "North East", "South West", "South East"]} + grid_correct_new = { + "data": { + "North East": { + "Name": "North East"}, + "North West": { + "Name": "North West"}, + "South East": { + "Name": "South East"}, + "South West": { + "Name": "South West"}}, + "grid": [ + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " !! ## ", + " !!! ### ", + " !! ## ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " $$ %% ", + " $$$ %% ", + " $$ %% ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " "], + "keys": [ + "", + "North West", + "North East", + "South West", + "South East"]} # newer rendering using svg - grid_correct_new2 = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $$ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "North West", "North East", "South West", "South East"]} - - grid_correct_new3 = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! 
## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "North West", "North East", "South West", "South East"]} - - def resolve(grid,row,col): + grid_correct_new2 = { + "data": { + "North East": { + "Name": "North East"}, + "North West": { + "Name": "North West"}, + "South East": { + "Name": "South East"}, + "South West": { + "Name": "South West"}}, + "grid": [ + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " !! ## ", + " !!! ### ", + " !! ## ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " $$ %% ", + " $$$ %% ", + " $$ %% ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " "], + "keys": [ + "", + "North West", + "North East", + "South West", + "South East"]} + + grid_correct_new3 = { + "data": { + "North East": { + "Name": "North East"}, + "North West": { + "Name": "North West"}, + "South East": { + "Name": "South East"}, + "South West": { + "Name": "South West"}}, + "grid": [ + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " !! ## ", + " !!! ### ", + " !! ## ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " $$ %% ", + " $$$ %% ", + " $ %% ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " "], + "keys": [ + "", + "North West", + "North East", + "South West", + "South East"]} + + def resolve(grid, row, col): """ Resolve the attributes for a given pixel in a grid. 
""" row = grid['grid'][row] utf_val = row[col] - #http://docs.python.org/library/functions.html#ord + # http://docs.python.org/library/functions.html#ord codepoint = ord(utf_val) if (codepoint >= 93): - codepoint-=1 + codepoint -= 1 if (codepoint >= 35): - codepoint-=1 + codepoint -= 1 codepoint -= 32 key = grid['keys'][codepoint] return grid['data'].get(key) - - def create_grid_map(width,height,sym): + def create_grid_map(width, height, sym): ds = mapnik.MemoryDatasource() context = mapnik.Context() context.push('Name') - f = mapnik.Feature(context,1) + f = mapnik.Feature(context, 1) f['Name'] = 'South East' f.geometry = mapnik.Geometry.from_wkt('POINT (143.10 -38.60)') ds.add_feature(f) - f = mapnik.Feature(context,2) + f = mapnik.Feature(context, 2) f['Name'] = 'South West' f.geometry = mapnik.Geometry.from_wkt('POINT (142.48 -38.60)') ds.add_feature(f) - f = mapnik.Feature(context,3) + f = mapnik.Feature(context, 3) f['Name'] = 'North West' f.geometry = mapnik.Geometry.from_wkt('POINT (142.48 -38.38)') ds.add_feature(f) - f = mapnik.Feature(context,4) + f = mapnik.Feature(context, 4) f['Name'] = 'North East' f.geometry = mapnik.Geometry.from_wkt('POINT (143.10 -38.38)') ds.add_feature(f) s = mapnik.Style() r = mapnik.Rule() sym.allow_overlap = True - r.symbols.append(sym) + r.symbolizers.append(sym) s.rules.append(r) lyr = mapnik.Layer('Places') lyr.datasource = ds lyr.styles.append('places_labels') - m = mapnik.Map(width,height) - m.append_style('places_labels',s) + m = mapnik.Map(width, height) + m.append_style('places_labels', s) m.layers.append(lyr) return m - - def test_render_grid(): + def test_render_grid(setup): """ test render_grid method""" - width,height = 256,256 + width, height = 256, 256 sym = mapnik.MarkersSymbolizer() sym.width = mapnik.Expression('10') sym.height = mapnik.Expression('10') - m = create_grid_map(width,height,sym) - ul_lonlat = mapnik.Coord(142.30,-38.20) - lr_lonlat = mapnik.Coord(143.40,-38.80) - m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat)) + m = create_grid_map(width, height, sym) + ul_lonlat = mapnik.Coord(142.30, -38.20) + lr_lonlat = mapnik.Coord(143.40, -38.80) + m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat)) # new method - grid = mapnik.Grid(m.width,m.height,key='Name') - mapnik.render_layer(m,grid,layer=0,fields=['Name']) - utf1 = grid.encode('utf',resolution=4) - eq_(utf1,grid_correct_new3,show_grids('new-markers',utf1,grid_correct_new3)) + grid = mapnik.Grid(m.width, m.height, key='Name') + mapnik.render_layer(m, grid, layer=0, fields=['Name']) + utf1 = grid.encode('utf', resolution=4) + assert utf1 == grid_correct_new3, show_grids('new-markers', utf1, grid_correct_new3) # check a full view is the same as a full image - grid_view = grid.view(0,0,width,height) + grid_view = grid.view(0, 0, width, height) # for kicks check at full res too - utf3 = grid.encode('utf',resolution=1) - utf4 = grid_view.encode('utf',resolution=1) - eq_(utf3['grid'],utf4['grid']) - eq_(utf3['keys'],utf4['keys']) - eq_(utf3['data'],utf4['data']) + utf3 = grid.encode('utf', resolution=1) + utf4 = grid_view.encode('utf', resolution=1) + assert utf3['grid'] == utf4['grid'] + assert utf3['keys'] == utf4['keys'] + assert utf3['data'] == utf4['data'] - eq_(resolve(utf4,0,0),None) + assert resolve(utf4, 0, 0) == None # resolve some center points in the # resampled view - utf5 = grid_view.encode('utf',resolution=4) - eq_(resolve(utf5,25,10),{"Name": "North West"}) - eq_(resolve(utf5,25,46),{"Name": "North East"}) - eq_(resolve(utf5,38,10),{"Name": "South West"}) - 
eq_(resolve(utf5,38,46),{"Name": "South East"}) - - - grid_feat_id = {'keys': ['', '3', '4', '2', '1'], 'data': {'1': {'Name': 'South East'}, '3': {'Name': u'North West'}, '2': {'Name': 'South West'}, '4': {'Name': 'North East'}}, 'grid': [' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' !! ## ', ' !!! ### ', ' !! ## ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' $$$ %% ', ' $$$ %%% ', ' $$ %% ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']} - - grid_feat_id2 = {"data": {"1": {"Name": "South East"}, "2": {"Name": "South West"}, "3": {"Name": "North West"}, "4": {"Name": "North East"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $$ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "3", "4", "2", "1"]} - - grid_feat_id3 = {"data": {"1": {"Name": "South East", "__id__": 1}, "2": {"Name": "South West", "__id__": 2}, "3": {"Name": "North West", "__id__": 3}, "4": {"Name": "North East", "__id__": 4}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "3", "4", "2", "1"]} + utf5 = grid_view.encode('utf', resolution=4) + assert resolve(utf5, 25, 10) == {"Name": "North West"} + assert resolve(utf5, 25, 46) == {"Name": "North East"} + assert resolve(utf5, 38, 10) == {"Name": "South West"} + assert resolve(utf5, 38, 46) == {"Name": "South East"} + + grid_feat_id = { + 'keys': [ + '', + '3', + '4', + '2', + '1'], + 'data': { + '1': { + 'Name': 'South East'}, + '3': { + 'Name': u'North West'}, + '2': { + 'Name': 'South West'}, + '4': { + 'Name': 'North East'}}, + 'grid': [ + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' !! ## ', + ' !!! ### ', + ' !! ## ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' $$$ %% ', + ' $$$ %%% ', + ' $$ %% ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ']} + + grid_feat_id2 = { + "data": { + "1": { + "Name": "South East"}, + "2": { + "Name": "South West"}, + "3": { + "Name": "North West"}, + "4": { + "Name": "North East"}}, + "grid": [ + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " !! ## ", + " !!! ### ", + " !! 
## ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " $$ %% ", + " $$$ %% ", + " $$ %% ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " "], + "keys": [ + "", + "3", + "4", + "2", + "1"]} + + grid_feat_id3 = { + "data": { + "1": { + "Name": "South East", + "__id__": 1}, + "2": { + "Name": "South West", + "__id__": 2}, + "3": { + "Name": "North West", + "__id__": 3}, + "4": { + "Name": "North East", + "__id__": 4}}, + "grid": [ + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " !! ## ", + " !!! ### ", + " !! ## ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " $$ %% ", + " $$$ %% ", + " $ %% ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " "], + "keys": [ + "", + "3", + "4", + "2", + "1"]} def test_render_grid3(): """ test using feature id""" - width,height = 256,256 + width, height = 256, 256 sym = mapnik.MarkersSymbolizer() sym.width = mapnik.Expression('10') sym.height = mapnik.Expression('10') - m = create_grid_map(width,height,sym) - ul_lonlat = mapnik.Coord(142.30,-38.20) - lr_lonlat = mapnik.Coord(143.40,-38.80) - m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat)) - - grid = mapnik.Grid(m.width,m.height,key='__id__') - mapnik.render_layer(m,grid,layer=0,fields=['__id__','Name']) - utf1 = grid.encode('utf',resolution=4) - eq_(utf1,grid_feat_id3,show_grids('id-markers',utf1,grid_feat_id3)) + m = create_grid_map(width, height, sym) + ul_lonlat = mapnik.Coord(142.30, -38.20) + lr_lonlat = mapnik.Coord(143.40, -38.80) + m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat)) + + grid = mapnik.Grid(m.width, m.height, key='__id__') + mapnik.render_layer(m, grid, layer=0, fields=['__id__', 'Name']) + utf1 = grid.encode('utf', resolution=4) + assert utf1 == grid_feat_id3, show_grids('id-markers', utf1 == grid_feat_id3) # check a full view is the same as a full image - grid_view = grid.view(0,0,width,height) + grid_view = grid.view(0, 0, width, height) # for kicks check at full res too - utf3 = grid.encode('utf',resolution=1) - utf4 = grid_view.encode('utf',resolution=1) - eq_(utf3['grid'],utf4['grid']) - eq_(utf3['keys'],utf4['keys']) - eq_(utf3['data'],utf4['data']) + utf3 = grid.encode('utf', resolution=1) + utf4 = grid_view.encode('utf', resolution=1) + assert utf3['grid'] == utf4['grid'] + assert utf3['keys'] == utf4['keys'] + assert utf3['data'] == utf4['data'] - eq_(resolve(utf4,0,0),None) + assert resolve(utf4, 0, 0) == None # resolve some center points in the # resampled view - utf5 = grid_view.encode('utf',resolution=4) - eq_(resolve(utf5,25,10),{"Name": "North West","__id__": 3}) - eq_(resolve(utf5,25,46),{"Name": "North East","__id__": 4}) - eq_(resolve(utf5,38,10),{"Name": "South West","__id__": 2}) - eq_(resolve(utf5,38,46),{"Name": "South East","__id__": 1}) - + utf5 = grid_view.encode('utf', resolution=4) + assert resolve(utf5, 25, 10) == {"Name": "North West", "__id__": 3} + assert resolve(utf5, 25, 46) == {"Name": "North East", "__id__": 4} + assert resolve(utf5, 38, 10) == {"Name": "South West", "__id__": 2} + assert resolve(utf5, 38, 46) == {"Name": "South East", "__id__": 1} def gen_grid_for_id(pixel_key): ds = mapnik.MemoryDatasource() context = mapnik.Context() 
context.push('Name') - f = mapnik.Feature(context,pixel_key) + f = mapnik.Feature(context, pixel_key) f['Name'] = str(pixel_key) - f.geometry = mapnik.Geometry.from_wkt('POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))') + f.geometry = mapnik.Geometry.from_wkt( + 'POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))') ds.add_feature(f) s = mapnik.Style() r = mapnik.Rule() symb = mapnik.PolygonSymbolizer() - r.symbols.append(symb) + r.symbolizers.append(symb) s.rules.append(r) lyr = mapnik.Layer('Places') lyr.datasource = ds lyr.styles.append('places_labels') - width,height = 256,256 - m = mapnik.Map(width,height) - m.append_style('places_labels',s) + width, height = 256, 256 + m = mapnik.Map(width, height) + m.append_style('places_labels', s) m.layers.append(lyr) m.zoom_all() - grid = mapnik.Grid(m.width,m.height,key='__id__') - mapnik.render_layer(m,grid,layer=0,fields=['__id__','Name']) + grid = mapnik.Grid(m.width, m.height, key='__id__') + mapnik.render_layer(m, grid, layer=0, fields=['__id__', 'Name']) return grid def test_negative_id(): grid = gen_grid_for_id(-1) - eq_(grid.get_pixel(128,128),-1) - utf1 = grid.encode('utf',resolution=4) - eq_(utf1['keys'],['-1']) + assert grid.get_pixel(128, 128) == -1 + utf1 = grid.encode('utf', resolution=4) + assert utf1['keys'] == ['-1'] def test_32bit_int_id(): int32 = 2147483647 grid = gen_grid_for_id(int32) - eq_(grid.get_pixel(128,128),int32) - utf1 = grid.encode('utf',resolution=4) - eq_(utf1['keys'],[str(int32)]) + assert grid.get_pixel(128, 128) == int32 + utf1 = grid.encode('utf', resolution=4) + assert utf1['keys'] == [str(int32)] max_neg = -(int32) grid = gen_grid_for_id(max_neg) - eq_(grid.get_pixel(128,128),max_neg) - utf1 = grid.encode('utf',resolution=4) - eq_(utf1['keys'],[str(max_neg)]) + assert grid.get_pixel(128, 128) == max_neg + utf1 = grid.encode('utf', resolution=4) + assert utf1['keys'] == [str(max_neg)] def test_64bit_int_id(): int64 = 0x7FFFFFFFFFFFFFFF grid = gen_grid_for_id(int64) - eq_(grid.get_pixel(128,128),int64) - utf1 = grid.encode('utf',resolution=4) - eq_(utf1['keys'],[str(int64)]) + assert grid.get_pixel(128, 128) == int64 + utf1 = grid.encode('utf', resolution=4) + assert utf1['keys'] == [str(int64)] max_neg = -(int64) grid = gen_grid_for_id(max_neg) - eq_(grid.get_pixel(128,128),max_neg) - utf1 = grid.encode('utf',resolution=4) - eq_(utf1['keys'],[str(max_neg)]) + assert grid.get_pixel(128, 128) == max_neg + utf1 = grid.encode('utf', resolution=4) + assert utf1['keys'] == [str(max_neg)] def test_id_zero(): grid = gen_grid_for_id(0) - eq_(grid.get_pixel(128,128),0) - utf1 = grid.encode('utf',resolution=4) - eq_(utf1['keys'],['0']) - - line_expected = {"keys": ["", "1"], "data": {"1": {"Name": "1"}}, "grid": [" !", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", "!! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! "]} + assert grid.get_pixel(128, 128) == 0 + utf1 = grid.encode('utf', resolution=4) + assert utf1['keys'] == ['0'] + + line_expected = { + "keys": [ + "", + "1"], + "data": { + "1": { + "Name": "1"}}, + "grid": [ + " !", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! 
", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + " !! ", + "!! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! ", + " ! "]} def test_line_rendering(): ds = mapnik.MemoryDatasource() context = mapnik.Context() context.push('Name') pixel_key = 1 - f = mapnik.Feature(context,pixel_key) + f = mapnik.Feature(context, pixel_key) f['Name'] = str(pixel_key) - f.geometry = mapnik.Geometry.from_wkt('LINESTRING (30 10, 10 30, 40 40)') + f.geometry = mapnik.Geometry.from_wkt( + 'LINESTRING (30 10, 10 30, 40 40)') ds.add_feature(f) s = mapnik.Style() r = mapnik.Rule() symb = mapnik.LineSymbolizer() - r.symbols.append(symb) + r.symbolizers.append(symb) s.rules.append(r) lyr = mapnik.Layer('Places') lyr.datasource = ds lyr.styles.append('places_labels') - width,height = 256,256 - m = mapnik.Map(width,height) - m.append_style('places_labels',s) + width, height = 256, 256 + m = mapnik.Map(width, height) + m.append_style('places_labels', s) m.layers.append(lyr) m.zoom_all() - #mapnik.render_to_file(m,'test.png') - grid = mapnik.Grid(m.width,m.height,key='__id__') - mapnik.render_layer(m,grid,layer=0,fields=['Name']) + # mapnik.render_to_file(m,'test.png') + grid = mapnik.Grid(m.width, m.height, key='__id__') + mapnik.render_layer(m, grid, layer=0, fields=['Name']) utf1 = grid.encode() - eq_(utf1,line_expected,show_grids('line',utf1,line_expected)) - - point_expected = {"data": {"1": {"Name": "South East"}, "2": {"Name": "South West"}, "3": {"Name": "North West"}, "4": {"Name": "North East"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !!!! #### ", " !!!! #### ", " !!!! #### ", " !!!! #### ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$$$ %%%% ", " $$$$ %%%% ", " $$$$ %%%% ", " $$$$ %%%% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "3", "4", "2", "1"]} + assert utf1 == line_expected, show_grids('line', utf1, line_expected) + + point_expected = { + "data": { + "1": { + "Name": "South East"}, + "2": { + "Name": "South West"}, + "3": { + "Name": "North West"}, + "4": { + "Name": "North East"}}, + "grid": [ + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " !!!! #### ", + " !!!! #### ", + " !!!! #### ", + " !!!! 
#### ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " $$$$ %%%% ", + " $$$$ %%%% ", + " $$$$ %%%% ", + " $$$$ %%%% ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " ", + " "], + "keys": [ + "", + "3", + "4", + "2", + "1"]} def test_point_symbolizer_grid(): - width,height = 256,256 + width, height = 256, 256 sym = mapnik.PointSymbolizer() sym.file = '../data/images/dummy.png' - m = create_grid_map(width,height,sym) - ul_lonlat = mapnik.Coord(142.30,-38.20) - lr_lonlat = mapnik.Coord(143.40,-38.80) - m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat)) - grid = mapnik.Grid(m.width,m.height) - mapnik.render_layer(m,grid,layer=0,fields=['Name']) + m = create_grid_map(width, height, sym) + ul_lonlat = mapnik.Coord(142.30, -38.20) + lr_lonlat = mapnik.Coord(143.40, -38.80) + m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat)) + grid = mapnik.Grid(m.width, m.height) + mapnik.render_layer(m, grid, layer=0, fields=['Name']) utf1 = grid.encode() - eq_(utf1,point_expected,show_grids('point-sym',utf1,point_expected)) + assert utf1 == point_expected, show_grids('point-sym', utf1, point_expected) test_point_symbolizer_grid.requires_data = True # should throw because this is a mis-usage # https://github.com/mapnik/mapnik/issues/1325 - @raises(RuntimeError) def test_render_to_grid_multiple_times(): - # create map with two layers - m = mapnik.Map(256,256) - s = mapnik.Style() - r = mapnik.Rule() - sym = mapnik.MarkersSymbolizer() - sym.allow_overlap = True - r.symbols.append(sym) - s.rules.append(r) - m.append_style('points',s) - - # NOTE: we use a csv datasource here - # because the memorydatasource fails silently for - # queries requesting fields that do not exist in the datasource - ds1 = mapnik.Datasource(**{"type":"csv","inline":''' - wkt,Name - "POINT (143.10 -38.60)",South East'''}) - lyr1 = mapnik.Layer('One') - lyr1.datasource = ds1 - lyr1.styles.append('points') - m.layers.append(lyr1) - - ds2 = mapnik.Datasource(**{"type":"csv","inline":''' - wkt,Value - "POINT (142.48 -38.60)",South West'''}) - lyr2 = mapnik.Layer('Two') - lyr2.datasource = ds2 - lyr2.styles.append('points') - m.layers.append(lyr2) - - ul_lonlat = mapnik.Coord(142.30,-38.20) - lr_lonlat = mapnik.Coord(143.40,-38.80) - m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat)) - grid = mapnik.Grid(m.width,m.height) - mapnik.render_layer(m,grid,layer=0,fields=['Name']) - # should throw right here since Name will be a property now on the `grid` object - # and it is not found on the second layer - mapnik.render_layer(m,grid,layer=1,fields=['Value']) - grid.encode() - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + with pytest.raises(RuntimeError): + # create map with two layers + m = mapnik.Map(256, 256) + s = mapnik.Style() + r = mapnik.Rule() + sym = mapnik.MarkersSymbolizer() + sym.allow_overlap = True + r.symbolizers.append(sym) + s.rules.append(r) + m.append_style('points', s) + + # NOTE: we use a csv datasource here + # because the memorydatasource fails silently for + # queries requesting fields that do not exist in the datasource + ds1 = mapnik.Datasource(**{"type": "csv", "inline": ''' + wkt,Name + "POINT (143.10 -38.60)",South East'''}) + lyr1 = mapnik.Layer('One') + lyr1.datasource = ds1 + lyr1.styles.append('points') + m.layers.append(lyr1) + + ds2 = mapnik.Datasource(**{"type": "csv", "inline": ''' + wkt,Value + "POINT (142.48 -38.60)",South 
West'''}) + lyr2 = mapnik.Layer('Two') + lyr2.datasource = ds2 + lyr2.styles.append('points') + m.layers.append(lyr2) + + ul_lonlat = mapnik.Coord(142.30, -38.20) + lr_lonlat = mapnik.Coord(143.40, -38.80) + m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat)) + grid = mapnik.Grid(m.width, m.height) + mapnik.render_layer(m, grid, layer=0, fields=['Name']) + # should throw right here since Name will be a property now on the `grid` object + # and it is not found on the second layer + mapnik.render_layer(m, grid, layer=1, fields=['Value']) + grid.encode() diff --git a/test/python_tests/render_test.py b/test/python_tests/render_test.py index 197d010cf..b10058b5b 100644 --- a/test/python_tests/render_test.py +++ b/test/python_tests/render_test.py @@ -1,76 +1,81 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_,raises +import sys, os import tempfile -import os, mapnik -from utilities import execution_path, run_all +import mapnik +import pytest +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield -def test_simplest_render(): +def test_simplest_render(setup): m = mapnik.Map(256, 256) im = mapnik.Image(m.width, m.height) - eq_(im.painted(),False) - eq_(im.is_solid(),True) + assert not im.painted() + assert im.is_solid() mapnik.render(m, im) - eq_(im.painted(),False) - eq_(im.is_solid(),True) - s = im.tostring() - eq_(s, 256 * 256 * '\x00\x00\x00\x00') + assert not im.painted() + assert im.is_solid() + s = im.to_string() + assert s == 256 * 256 * b'\x00\x00\x00\x00' + def test_render_image_to_string(): im = mapnik.Image(256, 256) im.fill(mapnik.Color('black')) - eq_(im.painted(),False) - eq_(im.is_solid(),True) - s = im.tostring() - eq_(s, 256 * 256 * '\x00\x00\x00\xff') + assert not im.painted() + assert im.is_solid() + s = im.to_string() + assert s == 256 * 256 * b'\x00\x00\x00\xff' + def test_non_solid_image(): im = mapnik.Image(256, 256) im.fill(mapnik.Color('black')) - eq_(im.painted(),False) - eq_(im.is_solid(),True) + assert not im.painted() + assert im.is_solid() # set one pixel to a different color - im.set_pixel(0,0,mapnik.Color('white')) - eq_(im.painted(),False) - eq_(im.is_solid(),False) + im.set_pixel(0, 0, mapnik.Color('white')) + assert not im.painted() + assert not im.is_solid() + def test_non_solid_image_view(): im = mapnik.Image(256, 256) im.fill(mapnik.Color('black')) - view = im.view(0,0,256,256) - eq_(view.is_solid(),True) + view = im.view(0, 0, 256, 256) + assert view.is_solid() # set one pixel to a different color - im.set_pixel(0,0,mapnik.Color('white')) - eq_(im.is_solid(),False) + im.set_pixel(0, 0, mapnik.Color('white')) + assert not im.is_solid() # view, since it is the exact dimensions of the image # should also be non-solid - eq_(view.is_solid(),False) + assert not view.is_solid() # but not a view that excludes the single diff pixel - view2 = im.view(1,1,256,256) - eq_(view2.is_solid(),True) + view2 = im.view(1, 1, 256, 256) + assert view2.is_solid() + def test_setting_alpha(): - w,h = 256,256 - im1 = mapnik.Image(w,h) + w, h = 256, 256 + im1 = mapnik.Image(w, h) # white, half transparent c1 = mapnik.Color('rgba(255,255,255,.5)') im1.fill(c1) - eq_(im1.painted(),False) - eq_(im1.is_solid(),True) + assert not im1.painted() + assert im1.is_solid() # pure white - im2 = mapnik.Image(w,h) + im2 = mapnik.Image(w, h) c2 = mapnik.Color('rgba(255,255,255,1)') im2.fill(c2) - 
im2.apply_opacity(c1.a/255.0) - eq_(im2.painted(),False) - eq_(im2.is_solid(),True) - eq_(len(im1.tostring('png32')), len(im2.tostring('png32'))) + im2.apply_opacity(c1.a / 255.0) + assert not im2.painted() + assert im2.is_solid() + assert len(im1.to_string('png32')) == len(im2.to_string('png32')) + def test_render_image_to_file(): im = mapnik.Image(256, 256) @@ -87,46 +92,52 @@ def test_render_image_to_file(): else: return False -def get_paired_images(w,h,mapfile): + +def get_paired_images(w, h, mapfile): tmp_map = 'tmp_map.xml' - m = mapnik.Map(w,h) - mapnik.load_map(m,mapfile) - im = mapnik.Image(w,h) + m = mapnik.Map(w, h) + mapnik.load_map(m, mapfile) + im = mapnik.Image(w, h) m.zoom_all() - mapnik.render(m,im) - mapnik.save_map(m,tmp_map) - m2 = mapnik.Map(w,h) - mapnik.load_map(m2,tmp_map) - im2 = mapnik.Image(w,h) + mapnik.render(m, im) + mapnik.save_map(m, tmp_map) + m2 = mapnik.Map(w, h) + mapnik.load_map(m2, tmp_map) + im2 = mapnik.Image(w, h) m2.zoom_all() - mapnik.render(m2,im2) + mapnik.render(m2, im2) os.remove(tmp_map) - return im,im2 + return im, im2 + def test_render_from_serialization(): try: - im,im2 = get_paired_images(100,100,'../data/good_maps/building_symbolizer.xml') - eq_(im.tostring('png32'),im2.tostring('png32')) + im, im2 = get_paired_images( + 100, 100, '../data/good_maps/building_symbolizer.xml') + assert im.to_string('png32') == im2.to_string('png32') - im,im2 = get_paired_images(100,100,'../data/good_maps/polygon_symbolizer.xml') - eq_(im.tostring('png32'),im2.tostring('png32')) - except RuntimeError, e: + im, im2 = get_paired_images( + 100, 100, '../data/good_maps/polygon_symbolizer.xml') + assert im.to_string('png32') == im2.to_string('png32') + except RuntimeError as e: # only test datasources that we have installed if not 'Could not create datasource' in str(e): raise RuntimeError(e) + def test_render_points(): - if not mapnik.has_cairo(): return + if not mapnik.has_cairo(): + return # create and populate point datasource (WGS84 lat-lon coordinates) ds = mapnik.MemoryDatasource() context = mapnik.Context() context.push('Name') - f = mapnik.Feature(context,1) + f = mapnik.Feature(context, 1) f['Name'] = 'Westernmost Point' f.geometry = mapnik.Geometry.from_wkt('POINT (142.48 -38.38)') ds.add_feature(f) - f = mapnik.Feature(context,2) + f = mapnik.Feature(context, 2) f['Name'] = 'Southernmost Point' f.geometry = mapnik.Geometry.from_wkt('POINT (143.10 -38.60)') ds.add_feature(f) @@ -136,48 +147,56 @@ def test_render_points(): r = mapnik.Rule() symb = mapnik.PointSymbolizer() symb.allow_overlap = True - r.symbols.append(symb) + r.symbolizers.append(symb) s.rules.append(r) - lyr = mapnik.Layer('Places','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs') + lyr = mapnik.Layer( + 'Places', + 'epsg:4326') lyr.datasource = ds lyr.styles.append('places_labels') # latlon bounding box corners - ul_lonlat = mapnik.Coord(142.30,-38.20) - lr_lonlat = mapnik.Coord(143.40,-38.80) + ul_lonlat = mapnik.Coord(142.30, -38.20) + lr_lonlat = mapnik.Coord(143.40, -38.80) # render for different projections projs = { - 'google': '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over', - 'latlon': '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs', + 'google': 'epsg:3857', + 'latlon': 'epsg:4326', 'merc': '+proj=merc +datum=WGS84 +k=1.0 +units=m +over +no_defs', 'utm': '+proj=utm +zone=54 +datum=WGS84' - } - for projdescr in projs.iterkeys(): + } + for projdescr in projs: m = mapnik.Map(1000, 500, 
projs[projdescr]) - m.append_style('places_labels',s) + m.append_style('places_labels', s) m.layers.append(lyr) dest_proj = mapnik.Projection(projs[projdescr]) - src_proj = mapnik.Projection('+init=epsg:4326') - tr = mapnik.ProjTransform(src_proj,dest_proj) - m.zoom_to_box(tr.forward(mapnik.Box2d(ul_lonlat,lr_lonlat))) - # Render to SVG so that it can be checked how many points are there with string comparison - svg_file = os.path.join(tempfile.gettempdir(), 'mapnik-render-points-%s.svg' % projdescr) + src_proj = mapnik.Projection('epsg:4326') + tr = mapnik.ProjTransform(src_proj, dest_proj) + m.zoom_to_box(tr.forward(mapnik.Box2d(ul_lonlat, lr_lonlat))) + # Render to SVG so that it can be checked how many points are there + # with string comparison + svg_file = os.path.join( + tempfile.gettempdir(), + 'mapnik-render-points-%s.svg' % + projdescr) mapnik.render_to_file(m, svg_file) - num_points_present = len(ds.all_features()) - svg = open(svg_file,'r').read() + num_points_present = len(list(iter(ds))) + with open(svg_file, 'r') as f: + svg = f.read() num_points_rendered = svg.count('=1,True) + # assert len(selected.features)>=1 == True del ds - eq_(os.path.exists(index),True) + assert os.path.exists(index) == True os.unlink(index) - + test_rtree_creation.requires_data = True def test_geometry_round_trip(): @@ -107,42 +113,48 @@ def test_geometry_round_trip(): conn.commit() cur.close() - # add a point as wkb (using mapnik) to match how an ogr created db looks - x = -122 # longitude - y = 48 # latitude - wkt = 'POINT(%s %s)' % (x,y) - # little endian wkb (mapnik will auto-detect and ready either little or big endian (XDR)) + # add a point as wkb (using mapnik) to match how an ogr created db + # looks + x = -122 # longitude + y = 48 # latitude + wkt = 'POINT(%s %s)' % (x, y) + # little endian wkb (mapnik will auto-detect and ready either little or + # big endian (XDR)) wkb = mapnik.Geometry.from_wkt(wkt).to_wkb(mapnik.wkbByteOrder.NDR) - values = (None,sqlite3.Binary(wkb),"test point") + values = (None, sqlite3.Binary(wkb), "test point") cur = conn.cursor() - cur.execute('''INSERT into "point_table" (id,geometry,name) values (?,?,?)''',values) + cur.execute( + '''INSERT into "point_table" (id,geometry,name) values (?,?,?)''', + values) conn.commit() cur.close() conn.close() - def make_wkb_point(x,y): + def make_wkb_point(x, y): import struct - byteorder = 1; # little endian + byteorder = 1 # little endian endianess = '' if byteorder == 1: - endianess = '<' + endianess = '<' else: - endianess = '>' - geom_type = 1; # for a point - return struct.pack('%sbldd' % endianess, byteorder, geom_type, x, y) + endianess = '>' + geom_type = 1 # for a point + return struct.pack('%sbldd' % endianess, + byteorder, geom_type, x, y) # confirm the wkb matches a manually formed wkb - wkb2 = make_wkb_point(x,y) - eq_(wkb,wkb2) + wkb2 = make_wkb_point(x, y) + assert wkb == wkb2 # ensure we can read this data back out properly with mapnik - ds = mapnik.Datasource(**{'type':'sqlite','file':test_db, 'table':'point_table'}) - fs = ds.featureset() - feat = fs.next() - eq_(feat.id(),1) - eq_(feat['name'],'test point') - geom = feat.geometry; - eq_(geom.to_wkt(),'POINT(-122 48)') + ds = mapnik.Datasource( + **{'type': 'sqlite', 'file': test_db, 'table': 'point_table'}) + fs = iter(ds) + feat = next(fs) + assert feat.id() == 1 + assert feat['name'] == 'test point' + geom = feat.geometry + assert geom.to_wkt() == 'POINT(-122 48)' del ds # ensure it matches data read with just sqlite @@ -153,17 +165,12 @@ def 
make_wkb_point(x,y): result = cur.fetchone() cur.close() feat_id = result[0] - eq_(feat_id,1) + assert feat_id == 1 name = result[2] - eq_(name,'test point') + assert name == 'test point' geom_wkb_blob = result[1] - eq_(str(geom_wkb_blob),geom.to_wkb(mapnik.wkbByteOrder.NDR)) - new_geom = mapnik.Geometry.from_wkb(str(geom_wkb_blob)) - eq_(new_geom.to_wkt(),geom.to_wkt()) + assert geom_wkb_blob == geom.to_wkb(mapnik.wkbByteOrder.NDR) + new_geom = mapnik.Geometry.from_wkb(geom_wkb_blob) + assert new_geom.to_wkt() == geom.to_wkt() conn.close() os.unlink(test_db) - -if __name__ == "__main__": - setup() - returncode = run_all(eval(x) for x in dir() if x.startswith("test_")) - exit(returncode) diff --git a/test/python_tests/sqlite_test.py b/test/python_tests/sqlite_test.py index 69b8a6d91..b98678c78 100644 --- a/test/python_tests/sqlite_test.py +++ b/test/python_tests/sqlite_test.py @@ -1,175 +1,284 @@ -#!/usr/bin/env python - -from nose.tools import eq_, raises -from utilities import execution_path, run_all import os import mapnik +import pytest +from .utilities import execution_path - -def setup(): +@pytest.fixture(scope="module") +def setup_and_teardown(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) - -def teardown(): + yield index = '../data/sqlite/world.sqlite.index' if os.path.exists(index): os.unlink(index) if 'sqlite' in mapnik.DatasourceCache.plugin_names(): - def test_attachdb_with_relative_file(): + def test_attachdb_with_relative_file(setup_and_teardown): # The point table and index is in the qgis_spatiallite.sqlite # database. If either is not found, then this fails ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='point', - attachdb='scratch@qgis_spatiallite.sqlite' - ) - fs = ds.featureset() - feature = fs.next() - eq_(feature['pkuid'],1) + table='point', + attachdb='scratch@qgis_spatiallite.sqlite' + ) + fs = iter(ds) + feature = next(fs) + assert feature['pkuid'] == 1 test_attachdb_with_relative_file.requires_data = True def test_attachdb_with_multiple_files(): ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='attachedtest', - attachdb='scratch1@:memory:,scratch2@:memory:', - initdb=''' + table='attachedtest', + attachdb='scratch1@:memory:,scratch2@:memory:', + initdb=''' create table scratch1.attachedtest (the_geom); create virtual table scratch2.idx_attachedtest_the_geom using rtree(pkid,xmin,xmax,ymin,ymax); insert into scratch2.idx_attachedtest_the_geom values (1,-7799225.5,-7778571.0,1393264.125,1417719.375); ''' - ) - fs = ds.featureset() + ) + fs = iter(ds) feature = None - try : - feature = fs.next() + try: + feature = next(fs) except StopIteration: pass # the above should not throw but will result in no features - eq_(feature,None) - + assert feature == None + test_attachdb_with_multiple_files.requires_data = True def test_attachdb_with_absolute_file(): # The point table and index is in the qgis_spatiallite.sqlite # database. 
If either is not found, then this fails ds = mapnik.SQLite(file=os.getcwd() + '/../data/sqlite/world.sqlite', - table='point', - attachdb='scratch@qgis_spatiallite.sqlite' - ) - fs = ds.featureset() - feature = fs.next() - eq_(feature['pkuid'],1) + table='point', + attachdb='scratch@qgis_spatiallite.sqlite' + ) + fs = iter(ds) + feature = next(fs) + assert feature['pkuid'] == 1 test_attachdb_with_absolute_file.requires_data = True def test_attachdb_with_index(): ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='attachedtest', - attachdb='scratch@:memory:', - initdb=''' + table='attachedtest', + attachdb='scratch@:memory:', + initdb=''' create table scratch.attachedtest (the_geom); create virtual table scratch.idx_attachedtest_the_geom using rtree(pkid,xmin,xmax,ymin,ymax); insert into scratch.idx_attachedtest_the_geom values (1,-7799225.5,-7778571.0,1393264.125,1417719.375); ''' - ) + ) - fs = ds.featureset() + fs = iter(ds) feature = None - try : - feature = fs.next() + try: + feature = next(fs) except StopIteration: pass - eq_(feature,None) - + assert feature == None + test_attachdb_with_index.requires_data = True def test_attachdb_with_explicit_index(): ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='attachedtest', - index_table='myindex', - attachdb='scratch@:memory:', - initdb=''' + table='attachedtest', + index_table='myindex', + attachdb='scratch@:memory:', + initdb=''' create table scratch.attachedtest (the_geom); create virtual table scratch.myindex using rtree(pkid,xmin,xmax,ymin,ymax); insert into scratch.myindex values (1,-7799225.5,-7778571.0,1393264.125,1417719.375); ''' - ) - fs = ds.featureset() + ) + fs = iter(ds) feature = None try: - feature = fs.next() + feature = next(fs) except StopIteration: pass - eq_(feature,None) - + assert feature == None + test_attachdb_with_explicit_index.requires_data = True def test_attachdb_with_sql_join(): ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)', - attachdb='busines@business.sqlite' - ) - eq_(len(ds.fields()),29) - eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010']) - eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int']) - fs = ds.featureset() - feature = fs.next() - eq_(feature.id(),1) + table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)', + attachdb='busines@business.sqlite' + ) + assert len(ds.fields()) == 29 + assert ds.fields() == ['OGC_FID', + 'fips', + 'iso2', + 'iso3', + 'un', + 'name', + 'area', + 'pop2005', + 'region', + 'subregion', + 'lon', + 'lat', + 'ISO3:1', + '1995', + '1996', + '1997', + '1998', + '1999', + '2000', + '2001', + '2002', + '2003', + '2004', + '2005', + '2006', + '2007', + '2008', + '2009', + '2010'] + assert ds.field_types() == ['int', + 'str', + 'str', + 'str', + 'int', + 'str', + 'int', + 'int', + 'int', + 'int', + 'float', + 'float', + 'str', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int'] + fs = iter(ds) + feature = next(fs) + assert 
feature.id() == 1 expected = { - 1995:0, - 1996:0, - 1997:0, - 1998:0, - 1999:0, - 2000:0, - 2001:0, - 2002:0, - 2003:0, - 2004:0, - 2005:0, - 2006:0, - 2007:0, - 2008:0, - 2009:0, - 2010:0, - # this appears to be sqlites way of - # automatically handling clashing column names - 'ISO3:1':'ATG', - 'OGC_FID':1, - 'area':44, - 'fips':u'AC', - 'iso2':u'AG', - 'iso3':u'ATG', - 'lat':17.078, - 'lon':-61.783, - 'name':u'Antigua and Barbuda', - 'pop2005':83039, - 'region':19, - 'subregion':29, - 'un':28 + 1995: 0, + 1996: 0, + 1997: 0, + 1998: 0, + 1999: 0, + 2000: 0, + 2001: 0, + 2002: 0, + 2003: 0, + 2004: 0, + 2005: 0, + 2006: 0, + 2007: 0, + 2008: 0, + 2009: 0, + 2010: 0, + # this appears to be sqlites way of + # automatically handling clashing column names + 'ISO3:1': 'ATG', + 'OGC_FID': 1, + 'area': 44, + 'fips': u'AC', + 'iso2': u'AG', + 'iso3': u'ATG', + 'lat': 17.078, + 'lon': -61.783, + 'name': u'Antigua and Barbuda', + 'pop2005': 83039, + 'region': 19, + 'subregion': 29, + 'un': 28 } - for k,v in expected.items(): + for k, v in expected.items(): try: - eq_(feature[str(k)],v) + assert feature[str(k)] == v except: #import pdb;pdb.set_trace() - print 'invalid key/v %s/%s for: %s' % (k,v,feature) - + print('invalid key/v %s/%s for: %s' % (k, v, feature)) + test_attachdb_with_sql_join.requires_data = True def test_attachdb_with_sql_join_count(): ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)', - attachdb='busines@business.sqlite' - ) - eq_(len(ds.fields()),29) - eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010']) - eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int']) - eq_(len(ds.all_features()),100) - + table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)', + attachdb='busines@business.sqlite' + ) + assert len(ds.fields()) == 29 + assert ds.fields() == ['OGC_FID', + 'fips', + 'iso2', + 'iso3', + 'un', + 'name', + 'area', + 'pop2005', + 'region', + 'subregion', + 'lon', + 'lat', + 'ISO3:1', + '1995', + '1996', + '1997', + '1998', + '1999', + '2000', + '2001', + '2002', + '2003', + '2004', + '2005', + '2006', + '2007', + '2008', + '2009', + '2010'] + assert ds.field_types() == ['int', + 'str', + 'str', + 'str', + 'int', + 'str', + 'int', + 'int', + 'int', + 'int', + 'float', + 'float', + 'str', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int'] + assert len(list(iter(ds))) == 100 + test_attachdb_with_sql_join_count.requires_data = True def test_attachdb_with_sql_join_count2(): @@ -179,14 +288,70 @@ def test_attachdb_with_sql_join_count2(): select count(*) from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3; ''' ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)', - attachdb='busines@business.sqlite' - ) - eq_(len(ds.fields()),29) - eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 
'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010']) - eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int']) - eq_(len(ds.all_features()),192) - + table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)', + attachdb='busines@business.sqlite' + ) + assert len(ds.fields()) == 29 + assert ds.fields() == ['OGC_FID', + 'fips', + 'iso2', + 'iso3', + 'un', + 'name', + 'area', + 'pop2005', + 'region', + 'subregion', + 'lon', + 'lat', + 'ISO3:1', + '1995', + '1996', + '1997', + '1998', + '1999', + '2000', + '2001', + '2002', + '2003', + '2004', + '2005', + '2006', + '2007', + '2008', + '2009', + '2010'] + assert ds.field_types() == ['int', + 'str', + 'str', + 'str', + 'int', + 'str', + 'int', + 'int', + 'int', + 'int', + 'float', + 'float', + 'str', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int'] + assert len(list(iter(ds))) == 192 + test_attachdb_with_sql_join_count2.requires_data = True def test_attachdb_with_sql_join_count3(): @@ -194,14 +359,70 @@ def test_attachdb_with_sql_join_count3(): select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3; ''' ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select * from (select * from world_merc where !intersects!) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)', - attachdb='busines@business.sqlite' - ) - eq_(len(ds.fields()),29) - eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010']) - eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int']) - eq_(len(ds.all_features()),192) - + table='(select * from (select * from world_merc where !intersects!) 
as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)', + attachdb='busines@business.sqlite' + ) + assert len(ds.fields()) == 29 + assert ds.fields() == ['OGC_FID', + 'fips', + 'iso2', + 'iso3', + 'un', + 'name', + 'area', + 'pop2005', + 'region', + 'subregion', + 'lon', + 'lat', + 'ISO3:1', + '1995', + '1996', + '1997', + '1998', + '1999', + '2000', + '2001', + '2002', + '2003', + '2004', + '2005', + '2006', + '2007', + '2008', + '2009', + '2010'] + assert ds.field_types() == ['int', + 'str', + 'str', + 'str', + 'int', + 'str', + 'int', + 'int', + 'int', + 'int', + 'float', + 'float', + 'str', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int'] + assert len(list(iter(ds))) == 192 + test_attachdb_with_sql_join_count3.requires_data = True def test_attachdb_with_sql_join_count4(): @@ -209,14 +430,70 @@ def test_attachdb_with_sql_join_count4(): select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3; ''' ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select * from (select * from world_merc where !intersects! limit 1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)', - attachdb='busines@business.sqlite' - ) - eq_(len(ds.fields()),29) - eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010']) - eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int']) - eq_(len(ds.all_features()),1) - + table='(select * from (select * from world_merc where !intersects! limit 1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)', + attachdb='busines@business.sqlite' + ) + assert len(ds.fields()) == 29 + assert ds.fields() == ['OGC_FID', + 'fips', + 'iso2', + 'iso3', + 'un', + 'name', + 'area', + 'pop2005', + 'region', + 'subregion', + 'lon', + 'lat', + 'ISO3:1', + '1995', + '1996', + '1997', + '1998', + '1999', + '2000', + '2001', + '2002', + '2003', + '2004', + '2005', + '2006', + '2007', + '2008', + '2009', + '2010'] + assert ds.field_types() == ['int', + 'str', + 'str', + 'str', + 'int', + 'str', + 'int', + 'int', + 'int', + 'int', + 'float', + 'float', + 'str', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int', + 'int'] + assert len(list(iter(ds))) == 1 + test_attachdb_with_sql_join_count4.requires_data = True def test_attachdb_with_sql_join_count5(): @@ -224,162 +501,234 @@ def test_attachdb_with_sql_join_count5(): select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3; ''' ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select * from (select * from world_merc where !intersects! 
and 1=2) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)', - attachdb='busines@business.sqlite' - ) - # nothing is able to join to business so we don't pick up business schema - eq_(len(ds.fields()),12) - eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat']) - eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float']) - eq_(len(ds.all_features()),0) - + table='(select * from (select * from world_merc where !intersects! and 1=2) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)', + attachdb='busines@business.sqlite' + ) + # nothing is able to join to business so we don't pick up business + # schema + assert len(ds.fields()) == 12 + assert ds.fields() == ['OGC_FID', + 'fips', + 'iso2', + 'iso3', + 'un', + 'name', + 'area', + 'pop2005', + 'region', + 'subregion', + 'lon', + 'lat'] + assert ds.field_types() == ['int', + 'str', + 'str', + 'str', + 'int', + 'str', + 'int', + 'int', + 'int', + 'int', + 'float', + 'float'] + assert len(list(iter(ds))) == 0 + test_attachdb_with_sql_join_count5.requires_data = True def test_subqueries(): ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='world_merc', - ) - fs = ds.featureset() - feature = fs.next() - eq_(feature['OGC_FID'],1) - eq_(feature['fips'],u'AC') - eq_(feature['iso2'],u'AG') - eq_(feature['iso3'],u'ATG') - eq_(feature['un'],28) - eq_(feature['name'],u'Antigua and Barbuda') - eq_(feature['area'],44) - eq_(feature['pop2005'],83039) - eq_(feature['region'],19) - eq_(feature['subregion'],29) - eq_(feature['lon'],-61.783) - eq_(feature['lat'],17.078) + table='world_merc', + ) + fs = iter(ds) + feature = next(fs) + assert feature['OGC_FID'] == 1 + assert feature['fips'] == u'AC' + assert feature['iso2'] == u'AG' + assert feature['iso3'] == u'ATG' + assert feature['un'] == 28 + assert feature['name'] == u'Antigua and Barbuda' + assert feature['area'] == 44 + assert feature['pop2005'] == 83039 + assert feature['region'] == 19 + assert feature['subregion'] == 29 + assert feature['lon'] == -61.783 + assert feature['lat'] == 17.078 ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select * from world_merc)', - ) - fs = ds.featureset() - feature = fs.next() - eq_(feature['OGC_FID'],1) - eq_(feature['fips'],u'AC') - eq_(feature['iso2'],u'AG') - eq_(feature['iso3'],u'ATG') - eq_(feature['un'],28) - eq_(feature['name'],u'Antigua and Barbuda') - eq_(feature['area'],44) - eq_(feature['pop2005'],83039) - eq_(feature['region'],19) - eq_(feature['subregion'],29) - eq_(feature['lon'],-61.783) - eq_(feature['lat'],17.078) + table='(select * from world_merc)', + ) + fs = iter(ds) + feature = next(fs) + assert feature['OGC_FID'] == 1 + assert feature['fips'] == u'AC' + assert feature['iso2'] == u'AG' + assert feature['iso3'] == u'ATG' + assert feature['un'] == 28 + assert feature['name'] == u'Antigua and Barbuda' + assert feature['area'] == 44 + assert feature['pop2005'] == 83039 + assert feature['region'] == 19 + assert feature['subregion'] == 29 + assert feature['lon'] == -61.783 + assert feature['lat'] == 17.078 ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select OGC_FID,GEOMETRY from world_merc)', - ) - fs = ds.featureset() - feature = fs.next() - eq_(feature['OGC_FID'],1) - eq_(len(feature),1) + table='(select OGC_FID,GEOMETRY from world_merc)', + ) + fs = iter(ds) + feature = next(fs) + assert feature['OGC_FID'] == 1 + assert len(feature) == 1 ds = 
mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select GEOMETRY,OGC_FID,fips from world_merc)', - ) - fs = ds.featureset() - feature = fs.next() - eq_(feature['OGC_FID'],1) - eq_(feature['fips'],u'AC') + table='(select GEOMETRY,OGC_FID,fips from world_merc)', + ) + fs = iter(ds) + feature = next(fs) + assert feature['OGC_FID'] == 1 + assert feature['fips'] == u'AC' # same as above, except with alias like postgres requires # TODO - should we try to make this work? - #ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', + # ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', # table='(select GEOMETRY,rowid as aliased_id,fips from world_merc) as table', # key_field='aliased_id' # ) - #fs = ds.featureset() - #feature = fs.next() - #eq_(feature['aliased_id'],1) - #eq_(feature['fips'],u'AC') + #fs = iter(ds) + #feature = next(fs) + # assert feature['aliased_id'] == 1 + # assert feature['fips'] == u'AC' ds = mapnik.SQLite(file='../data/sqlite/world.sqlite', - table='(select GEOMETRY,OGC_FID,OGC_FID as rowid,fips from world_merc)', - ) - fs = ds.featureset() - feature = fs.next() - eq_(feature['rowid'],1) - eq_(feature['fips'],u'AC') - + table='(select GEOMETRY,OGC_FID,OGC_FID as rowid,fips from world_merc)', + ) + fs = iter(ds) + feature = next(fs) + assert feature['rowid'] == 1 + assert feature['fips'] == u'AC' + test_subqueries.requires_data = True def test_empty_db(): ds = mapnik.SQLite(file='../data/sqlite/empty.db', - table='empty', - ) - fs = ds.featureset() + table='empty', + ) + fs = iter(ds) feature = None try: - feature = fs.next() + feature = next(fs) except StopIteration: pass - eq_(feature,None) + assert feature == None test_empty_db.requires_data = True - @raises(RuntimeError) + def test_that_nonexistant_query_field_throws(**kwargs): ds = mapnik.SQLite(file='../data/sqlite/empty.db', - table='empty', - ) - eq_(len(ds.fields()),25) - eq_(ds.fields(),['OGC_FID', 'scalerank', 'labelrank', 'featurecla', 'sovereignt', 'sov_a3', 'adm0_dif', 'level', 'type', 'admin', 'adm0_a3', 'geou_dif', 'name', 'abbrev', 'postal', 'name_forma', 'terr_', 'name_sort', 'map_color', 'pop_est', 'gdp_md_est', 'fips_10_', 'iso_a2', 'iso_a3', 'iso_n3']) - eq_(ds.field_types(),['int', 'int', 'int', 'str', 'str', 'str', 'float', 'float', 'str', 'str', 'str', 'float', 'str', 'str', 'str', 'str', 'str', 'str', 'float', 'float', 'float', 'float', 'str', 'str', 'float']) + table='empty', + ) + assert len(ds.fields()) == 25 + assert ds.fields() == ['OGC_FID', + 'scalerank', + 'labelrank', + 'featurecla', + 'sovereignt', + 'sov_a3', + 'adm0_dif', + 'level', + 'type', + 'admin', + 'adm0_a3', + 'geou_dif', + 'name', + 'abbrev', + 'postal', + 'name_forma', + 'terr_', + 'name_sort', + 'map_color', + 'pop_est', + 'gdp_md_est', + 'fips_10_', + 'iso_a2', + 'iso_a3', + 'iso_n3'] + assert ds.field_types() == ['int', + 'int', + 'int', + 'str', + 'str', + 'str', + 'float', + 'float', + 'str', + 'str', + 'str', + 'float', + 'str', + 'str', + 'str', + 'str', + 'str', + 'str', + 'float', + 'float', + 'float', + 'float', + 'str', + 'str', + 'float'] query = mapnik.Query(ds.envelope()) for fld in ds.fields(): query.add_property_name(fld) # also add an invalid one, triggering throw query.add_property_name('bogus') - ds.features(query) - + with pytest.raises(RuntimeError): + ds.features(query) + test_that_nonexistant_query_field_throws.requires_data = True def test_intersects_token1(): ds = mapnik.SQLite(file='../data/sqlite/empty.db', - table='(select * from empty where !intersects!)', - ) - fs = ds.featureset() + 
table='(select * from empty where !intersects!)', + ) + fs = iter(ds) feature = None - try : - feature = fs.next() + try: + feature = next(fs) except StopIteration: pass - eq_(feature,None) - + assert feature == None + test_intersects_token1.requires_data = True def test_intersects_token2(): ds = mapnik.SQLite(file='../data/sqlite/empty.db', - table='(select * from empty where "a"!="b" and !intersects!)', - ) - fs = ds.featureset() + table='(select * from empty where "a"!="b" and !intersects!)', + ) + fs = iter(ds) feature = None - try : - feature = fs.next() + try: + feature = next(fs) except StopIteration: pass - eq_(feature,None) - + assert feature == None + test_intersects_token2.requires_data = True def test_intersects_token3(): ds = mapnik.SQLite(file='../data/sqlite/empty.db', - table='(select * from empty where "a"!="b" and !intersects!)', - ) - fs = ds.featureset() + table='(select * from empty where "a"!="b" and !intersects!)', + ) + fs = iter(ds) feature = None - try : - feature = fs.next() + try: + feature = next(fs) except StopIteration: pass - eq_(feature,None) - + assert feature == None + test_intersects_token3.requires_data = True # https://github.com/mapnik/mapnik/issues/1537 @@ -388,114 +737,108 @@ def test_db_with_one_text_column(): # form up an in-memory test db wkb = '010100000000000000000000000000000000000000' ds = mapnik.SQLite(file=':memory:', - table='test1', - initdb=''' + table='test1', + initdb=''' create table test1 (alias TEXT,geometry BLOB); insert into test1 values ("test",x'%s'); ''' % wkb, - extent='-180,-60,180,60', - use_spatial_index=False, - key_field='alias' - ) - eq_(len(ds.fields()),1) - eq_(ds.fields(),['alias']) - eq_(ds.field_types(),['str']) - fs = ds.all_features() - eq_(len(fs),1) + extent='-180,-60,180,60', + use_spatial_index=False, + key_field='alias' + ) + assert len(ds.fields()) == 1 + assert ds.fields() == ['alias'] + assert ds.field_types() == ['str'] + fs = list(iter(ds)) + assert len(fs) == 1 feat = fs[0] - eq_(feat.id(),0) # should be 1? - eq_(feat['alias'],'test') - eq_(feat.geometry.to_wkt(),'POINT(0 0)') + assert feat.id() == 0 # should be 1? 
+ assert feat['alias'] == 'test' + assert feat.geometry.to_wkt() == 'POINT(0 0)' def test_db_with_one_untyped_column(): # form up an in-memory test db wkb = '010100000000000000000000000000000000000000' ds = mapnik.SQLite(file=':memory:', - table='test1', - initdb=''' + table='test1', + initdb=''' create table test1 (geometry BLOB, untyped); insert into test1 values (x'%s', 'untyped'); ''' % wkb, - extent='-180,-60,180,60', - use_spatial_index=False, - key_field='rowid' - ) + extent='-180,-60,180,60', + use_spatial_index=False, + key_field='rowid' + ) # ensure the untyped column is found - eq_(len(ds.fields()),2) - eq_(ds.fields(),['rowid', 'untyped']) - eq_(ds.field_types(),['int', 'str']) + assert len(ds.fields()) == 2 + assert ds.fields(), ['rowid' == 'untyped'] + assert ds.field_types(), ['int' == 'str'] def test_db_with_one_untyped_column_using_subquery(): # form up an in-memory test db wkb = '010100000000000000000000000000000000000000' ds = mapnik.SQLite(file=':memory:', - table='(SELECT rowid, geometry, untyped FROM test1)', - initdb=''' + table='(SELECT rowid, geometry, untyped FROM test1)', + initdb=''' create table test1 (geometry BLOB, untyped); insert into test1 values (x'%s', 'untyped'); ''' % wkb, - extent='-180,-60,180,60', - use_spatial_index=False, - key_field='rowid' - ) + extent='-180,-60,180,60', + use_spatial_index=False, + key_field='rowid' + ) # ensure the untyped column is found - eq_(len(ds.fields()),3) - eq_(ds.fields(),['rowid', 'untyped', 'rowid']) - eq_(ds.field_types(),['int', 'str', 'int']) - + assert len(ds.fields()) == 3 + assert ds.fields(), ['rowid', 'untyped' == 'rowid'] + assert ds.field_types(), ['int', 'str' == 'int'] def test_that_64bit_int_fields_work(): ds = mapnik.SQLite(file='../data/sqlite/64bit_int.sqlite', - table='int_table', - use_spatial_index=False - ) - eq_(len(ds.fields()),3) - eq_(ds.fields(),['OGC_FID','id','bigint']) - eq_(ds.field_types(),['int','int','int']) - fs = ds.featureset() - feat = fs.next() - eq_(feat.id(),1) - eq_(feat['OGC_FID'],1) - eq_(feat['bigint'],2147483648) - feat = fs.next() - eq_(feat.id(),2) - eq_(feat['OGC_FID'],2) - eq_(feat['bigint'],922337203685477580) + table='int_table', + use_spatial_index=False + ) + assert len(ds.fields()) == 3 + assert ds.fields(), ['OGC_FID', 'id' == 'bigint'] + assert ds.field_types(), ['int', 'int' == 'int'] + fs = iter(ds) + feat = next(fs) + assert feat.id() == 1 + assert feat['OGC_FID'] == 1 + assert feat['bigint'] == 2147483648 + feat = next(fs) + assert feat.id() == 2 + assert feat['OGC_FID'] == 2 + assert feat['bigint'] == 922337203685477580 test_that_64bit_int_fields_work.requires_data = True def test_null_id_field(): - # silence null key warning: https://github.com/mapnik/mapnik/issues/1889 + # silence null key warning: + # https://github.com/mapnik/mapnik/issues/1889 default_logging_severity = mapnik.logger.get_severity() - mapnik.logger.set_severity(mapnik.severity_type.None) + mapnik.logger.set_severity(getattr(mapnik.severity_type, "None")) # form up an in-memory test db wkb = '010100000000000000000000000000000000000000' # note: the osm_id should be declared INTEGER PRIMARY KEY # but in this case we intentionally do not make this a valid pkey # otherwise sqlite would turn the null into a valid, serial id ds = mapnik.SQLite(file=':memory:', - table='test1', - initdb=''' + table='test1', + initdb=''' create table test1 (osm_id INTEGER,geometry BLOB); insert into test1 values (null,x'%s'); ''' % wkb, - extent='-180,-60,180,60', - use_spatial_index=False, - key_field='osm_id' 
- ) - fs = ds.featureset() + extent='-180,-60,180,60', + use_spatial_index=False, + key_field='osm_id' + ) + fs = iter(ds) feature = None - try : - feature = fs.next() + try: + feature = next(fs) except StopIteration: pass - eq_(feature,None) + assert feature == None mapnik.logger.set_severity(default_logging_severity) - -if __name__ == "__main__": - setup() - result = run_all(eval(x) for x in dir() if x.startswith("test_")) - teardown() - exit(result) diff --git a/test/python_tests/style_test.py b/test/python_tests/style_test.py index 7bc782a4b..00dca93da 100644 --- a/test/python_tests/style_test.py +++ b/test/python_tests/style_test.py @@ -1,18 +1,10 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from nose.tools import eq_ -from utilities import run_all import mapnik def test_style_init(): - s = mapnik.Style() - eq_(s.filter_mode,mapnik.filter_mode.ALL) - eq_(len(s.rules),0) - eq_(s.opacity,1) - eq_(s.comp_op,None) - eq_(s.image_filters,"") - eq_(s.image_filters_inflate,False) - -if __name__ == "__main__": - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + s = mapnik.Style() + assert s.filter_mode == mapnik.filter_mode.ALL + assert len(s.rules) == 0 + assert s.opacity == 1 + assert s.comp_op == None + assert s.image_filters == "" + assert not s.image_filters_inflate diff --git a/test/python_tests/topojson_plugin_test.py b/test/python_tests/topojson_plugin_test.py index a5f3e573d..ec92c696c 100644 --- a/test/python_tests/topojson_plugin_test.py +++ b/test/python_tests/topojson_plugin_test.py @@ -1,91 +1,92 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- +import mapnik +import pytest +import os -from nose.tools import eq_,assert_almost_equal -from utilities import execution_path, run_all -import os, mapnik +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if 'topojson' in mapnik.DatasourceCache.plugin_names(): - def test_topojson_init(): - # topojson tests/data/json/escaped.geojson -o tests/data/json/escaped.topojson --properties + def test_topojson_init(setup): + # topojson tests/data/json/escaped.geojson -o tests/data/topojson/escaped.topojson --properties # topojson version 1.4.2 - ds = mapnik.Datasource(type='topojson',file='../data/json/escaped.topojson') + ds = mapnik.Datasource( + type='topojson', + file='../data/topojson/escaped.topojson') e = ds.envelope() - assert_almost_equal(e.minx, -81.705583, places=7) - assert_almost_equal(e.miny, 41.480573, places=6) - assert_almost_equal(e.maxx, -81.705583, places=5) - assert_almost_equal(e.maxy, 41.480573, places=3) + assert e.minx == pytest.approx(-81.705583, 1e-7) + assert e.miny == pytest.approx( 41.480573, 1e-6) + assert e.maxx == pytest.approx(-81.705583, 1e-5) + assert e.maxy == pytest.approx(41.480573, 1e-3) def test_topojson_properties(): - ds = mapnik.Datasource(type='topojson',file='../data/json/escaped.topojson') - f = ds.features_at_point(ds.envelope().center()).features[0] - eq_(len(ds.fields()),7) - desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) - - eq_(f['name'], u'Test') - eq_(f['int'], 1) - eq_(f['description'], u'Test: \u005C') - eq_(f['spaces'], u'this has spaces') - eq_(f['double'], 1.1) - eq_(f['boolean'], True) - eq_(f['NOM_FR'], u'Qu\xe9bec') - eq_(f['NOM_FR'], u'Québec') - - ds = mapnik.Datasource(type='topojson',file='../data/json/escaped.topojson') - f = ds.all_features()[0] - 
eq_(len(ds.fields()),7) + ds = mapnik.Datasource( + type='topojson', + file='../data/topojson/escaped.topojson') - desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) + f = list(ds.features_at_point(ds.envelope().center()))[0] + assert len(ds.fields()) == 11 + desc = ds.describe() + assert desc['geometry_type'] == mapnik.DataGeometryType.Point - eq_(f['name'], u'Test') - eq_(f['int'], 1) - eq_(f['description'], u'Test: \u005C') - eq_(f['spaces'], u'this has spaces') - eq_(f['double'], 1.1) - eq_(f['boolean'], True) - eq_(f['NOM_FR'], u'Qu\xe9bec') - eq_(f['NOM_FR'], u'Québec') + assert f['name'] == u'Test' + assert f['int'] == 1 + assert f['description'] == u'Test: \u005C' + assert f['spaces'] == u'this has spaces' + assert f['double'] == 1.1 + assert f['boolean'] == True + assert f['NOM_FR'] == u'Qu\xe9bec' + assert f['NOM_FR'] == u'Québec' def test_geojson_from_in_memory_string(): - ds = mapnik.Datasource(type='topojson',inline=open('../data/json/escaped.topojson','r').read()) - f = ds.all_features()[0] - eq_(len(ds.fields()),7) - + ds = mapnik.Datasource( + type='topojson', + inline=open( + '../data/topojson/escaped.topojson', + 'r').read()) + f = list(ds.features_at_point(ds.envelope().center()))[0] + assert len(ds.fields()) == 11 desc = ds.describe() - eq_(desc['geometry_type'],mapnik.DataGeometryType.Point) + assert desc['geometry_type'] == mapnik.DataGeometryType.Point - eq_(f['name'], u'Test') - eq_(f['int'], 1) - eq_(f['description'], u'Test: \u005C') - eq_(f['spaces'], u'this has spaces') - eq_(f['double'], 1.1) - eq_(f['boolean'], True) - eq_(f['NOM_FR'], u'Qu\xe9bec') - eq_(f['NOM_FR'], u'Québec') + assert f['name'] == u'Test' + assert f['int'] == 1 + assert f['description'] == u'Test: \u005C' + assert f['spaces'] == u'this has spaces' + assert f['double'] == 1.1 + assert f['boolean'] == True + assert f['NOM_FR'] == u'Qu\xe9bec' + assert f['NOM_FR'] == u'Québec' -# @raises(RuntimeError) + #@raises(RuntimeError) def test_that_nonexistant_query_field_throws(**kwargs): - ds = mapnik.Datasource(type='topojson',file='../data/json/escaped.topojson') - eq_(len(ds.fields()),7) + #with pytest.raises(RuntimeError): + ds = mapnik.Datasource( + type='topojson', + file='../data/topojson/escaped.topojson') + assert len(ds.fields()) == 11 # TODO - this sorting is messed up - eq_(ds.fields(),['name', 'int', 'description', 'spaces', 'double', 'boolean', 'NOM_FR']) - eq_(ds.field_types(),['str', 'int', 'str', 'str', 'float', 'bool', 'str']) -# TODO - should topojson plugin throw like others? -# query = mapnik.Query(ds.envelope()) -# for fld in ds.fields(): -# query.add_property_name(fld) -# # also add an invalid one, triggering throw -# query.add_property_name('bogus') -# fs = ds.features(query) + assert ds.fields() == ['name', 'int', 'description', + 'spaces', 'double', 'boolean', 'NOM_FR', + 'object', 'array', 'empty_array', 'empty_object'] + assert ds.field_types() == ['str', 'int', + 'str', 'str', 'float', 'bool', 'str', + 'str', 'str', 'str', 'str'] + # TODO - should topojson plugin throw like others? 
+ query = mapnik.Query(ds.envelope()) + for fld in ds.fields(): + query.add_property_name(fld) + # also add an invalid one, triggering throw + query.add_property_name('bogus') + fs = ds.features(query) + -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) +#if __name__ == "__main__": + #setup() +# exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) diff --git a/test/python_tests/utilities.py b/test/python_tests/utilities.py index fe02c7d63..0aa3cdf92 100644 --- a/test/python_tests/utilities.py +++ b/test/python_tests/utilities.py @@ -1,24 +1,18 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin -from nose.tools import assert_almost_equal - -import os, sys, traceback +import os +import sys +import traceback import mapnik +import pytest +READ_FLAGS = 'rb' HERE = os.path.dirname(__file__) def execution_path(filename): - return os.path.join(os.path.dirname(sys._getframe(1).f_code.co_filename), filename) - -class Todo(Exception): - pass - -class TodoPlugin(ErrorClassPlugin): - name = "todo" - - todo = ErrorClass(Todo, label='TODO', isfailure=False) + return os.path.join(os.path.dirname( + sys._getframe(1).f_code.co_filename), filename) def contains_word(word, bytestring_): """ @@ -37,66 +31,81 @@ def contains_word(word, bytestring_): AssertionError: len(bytestring_) not multiple of len(word) """ n = len(word) - assert len(bytestring_)%n == 0, "len(bytestring_) not multiple of len(word)" - chunks = [bytestring_[i:i+n] for i in xrange(0, len(bytestring_), n)] + assert len(bytestring_) % n == 0, "len(bytestring_) not multiple of len(word)" + chunks = [bytestring_[i:i + n] for i in range(0, len(bytestring_), n)] return word in chunks + def pixel2channels(pixel): alpha = (pixel >> 24) & 0xff red = pixel & 0xff green = (pixel >> 8) & 0xff blue = (pixel >> 16) & 0xff - return red,green,blue,alpha + return red, green, blue, alpha + def pixel2rgba(pixel): return 'rgba(%s,%s,%s,%s)' % pixel2channels(pixel) + def get_unique_colors(im): pixels = [] for x in range(im.width()): for y in range(im.height()): - pixel = im.get_pixel(x,y) + pixel = im.get_pixel(x, y) if pixel not in pixels: - pixels.append(pixel) + pixels.append(pixel) pixels = sorted(pixels) - return map(pixel2rgba,pixels) - -def run_all(iterable): - failed = 0 - for test in iterable: - try: - test() - sys.stderr.write("\x1b[32m✓ \x1b[m" + test.__name__ + "\x1b[m\n") - except: - exc_type, exc_value, exc_tb = sys.exc_info() - failed += 1 - sys.stderr.write("\x1b[31m✘ \x1b[m" + test.__name__ + "\x1b[m\n") - for mline in traceback.format_exception_only(exc_type, exc_value): - for line in mline.rstrip().split("\n"): - sys.stderr.write(" \x1b[31m" + line + "\x1b[m\n") - sys.stderr.write(" Traceback:\n") - for mline in traceback.format_tb(exc_tb): - for line in mline.rstrip().split("\n"): - if not 'utilities.py' in line and not 'trivial.py' in line and not line.strip() == 'test()': - sys.stderr.write(" " + line + "\n") - sys.stderr.flush() - return failed + return list(map(pixel2rgba, pixels)) def side_by_side_image(left_im, right_im): width = left_im.width() + 1 + right_im.width() height = max(left_im.height(), right_im.height()) im = mapnik.Image(width, height) - im.composite(left_im,mapnik.CompositeOp.src_over,1.0,0,0) + im.composite(left_im, mapnik.CompositeOp.src_over, 1.0, 0, 0) if width > 80: - im.composite(mapnik.Image.open(HERE+'/images/expected.png'),mapnik.CompositeOp.difference,1.0,0,0) - 
im.composite(right_im,mapnik.CompositeOp.src_over,1.0,left_im.width() + 1, 0) + im.composite( + mapnik.Image.open( + HERE + + '/images/expected.png'), + mapnik.CompositeOp.difference, + 1.0, + 0, + 0) + im.composite( + right_im, + mapnik.CompositeOp.src_over, + 1.0, + left_im.width() + 1, + 0) if width > 80: - im.composite(mapnik.Image.open(HERE+'/images/actual.png'),mapnik.CompositeOp.difference,1.0,left_im.width() + 1, 0) + im.composite( + mapnik.Image.open( + HERE + + '/images/actual.png'), + mapnik.CompositeOp.difference, + 1.0, + left_im.width() + + 1, + 0) return im + def assert_box2d_almost_equal(a, b, msg=None): msg = msg or ("%r != %r" % (a, b)) - assert_almost_equal(a.minx, b.minx, msg=msg) - assert_almost_equal(a.maxx, b.maxx, msg=msg) - assert_almost_equal(a.miny, b.miny, msg=msg) - assert_almost_equal(a.maxy, b.maxy, msg=msg) + assert a.minx == pytest.approx(b.minx, abs=1e-2), msg + assert a.maxx == pytest.approx(b.maxx, abs=1e-2), msg + assert a.miny == pytest.approx(b.miny, abs=1e-2), msg + assert a.maxy == pytest.approx(b.maxy, abs=1e-2), msg + + +def images_almost_equal(image1, image2, tolerance = 1): + def rgba(p): + return p & 0xff,(p >> 8) & 0xff,(p >> 16) & 0xff, p >> 24 + assert image1.width() == image2.width() + assert image1.height() == image2.height() + for x in range(image1.width()): + for y in range(image1.height()): + p1 = image1.get_pixel(x, y) + p2 = image2.get_pixel(x, y) + assert rgba(p1) == pytest.approx(rgba(p2), abs = tolerance) diff --git a/test/python_tests/webp_encoding_test.py b/test/python_tests/webp_encoding_test.py index 91e23fc1c..4af0950a9 100644 --- a/test/python_tests/webp_encoding_test.py +++ b/test/python_tests/webp_encoding_test.py @@ -1,19 +1,20 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- +import mapnik +import os +import pytest -import os, mapnik -from nose.tools import raises,eq_ -from utilities import execution_path, run_all +from .utilities import execution_path +@pytest.fixture(scope="module") def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) + yield if mapnik.has_webp(): tmp_dir = '/tmp/mapnik-webp/' if not os.path.exists(tmp_dir): - os.makedirs(tmp_dir) + os.makedirs(tmp_dir) opts = [ 'webp', @@ -38,30 +39,32 @@ def setup(): 'webp:target_size=100' ] + def gen_filepath(name, format): + return os.path.join('images/support/encoding-opts', + name + '-' + format.replace(":", "+") + '.webp') - def gen_filepath(name,format): - return os.path.join('images/support/encoding-opts',name+'-'+format.replace(":","+")+'.webp') + def test_quality_threshold(setup): + im = mapnik.Image(256, 256) + im.to_string('webp:quality=99.99000') + im.to_string('webp:quality=0') + im.to_string('webp:quality=0.001') - def test_quality_threshold(): - im = mapnik.Image(256,256) - im.tostring('webp:quality=99.99000') - im.tostring('webp:quality=0') - im.tostring('webp:quality=0.001') - @raises(RuntimeError) def test_quality_threshold_invalid(): - im = mapnik.Image(256,256) - im.tostring('webp:quality=101') + im = mapnik.Image(256, 256) + with pytest.raises(RuntimeError): + im.to_string('webp:quality=101') + - @raises(RuntimeError) def test_quality_threshold_invalid2(): - im = mapnik.Image(256,256) - im.tostring('webp:quality=-1') - - @raises(RuntimeError) + im = mapnik.Image(256, 256) + with pytest.raises(RuntimeError): + im.to_string('webp:quality=-1') + def test_quality_threshold_invalid3(): - im = mapnik.Image(256,256) - im.tostring('webp:quality=101.1') + im = 
mapnik.Image(256, 256) + with pytest.raises(RuntimeError): + im.to_string('webp:quality=101.1') generate = os.environ.get('UPDATE') @@ -69,74 +72,87 @@ def test_expected_encodings(): fails = [] try: for opt in opts: - im = mapnik.Image(256,256) - expected = gen_filepath('blank',opt) - actual = os.path.join(tmp_dir,os.path.basename(expected)) + im = mapnik.Image(256, 256) + expected = gen_filepath('blank', opt) + actual = os.path.join(tmp_dir, os.path.basename(expected)) if generate or not os.path.exists(expected): - print 'generating expected image %s' % expected - im.save(expected,opt) - im.save(actual,opt) + print('generating expected image', expected) + im.save(expected, opt) + im.save(actual, opt) try: - expected_bytes = mapnik.Image.open(expected).tostring() + expected_bytes = mapnik.Image.open(expected).to_string() except RuntimeError: - # this will happen if libweb is old, since it cannot open images created by more recent webp - print 'warning, cannot open webp expected image (your libwebp is likely too old)' + # this will happen if libweb is old, since it cannot open + # images created by more recent webp + print( + 'warning, cannot open webp expected image (your libwebp is likely too old)') continue - if mapnik.Image.open(actual).tostring() != expected_bytes: - fails.append('%s (actual) not == to %s (expected)' % (actual,expected)) + if mapnik.Image.open(actual).to_string() != expected_bytes: + fails.append( + '%s (actual) not == to %s (expected)' % + (actual, expected)) for opt in opts: - im = mapnik.Image(256,256) + im = mapnik.Image(256, 256) im.fill(mapnik.Color('green')) - expected = gen_filepath('solid',opt) - actual = os.path.join(tmp_dir,os.path.basename(expected)) + expected = gen_filepath('solid', opt) + actual = os.path.join(tmp_dir, os.path.basename(expected)) if generate or not os.path.exists(expected): - print 'generating expected image %s' % expected - im.save(expected,opt) - im.save(actual,opt) + print('generating expected image', expected) + im.save(expected, opt) + im.save(actual, opt) try: - expected_bytes = mapnik.Image.open(expected).tostring() + expected_bytes = mapnik.Image.open(expected).to_string() except RuntimeError: - # this will happen if libweb is old, since it cannot open images created by more recent webp - print 'warning, cannot open webp expected image (your libwebp is likely too old)' + # this will happen if libweb is old, since it cannot open + # images created by more recent webp + print( + 'warning, cannot open webp expected image (your libwebp is likely too old)') continue - if mapnik.Image.open(actual).tostring() != expected_bytes: - fails.append('%s (actual) not == to %s (expected)' % (actual,expected)) + if mapnik.Image.open(actual).to_string() != expected_bytes: + fails.append( + '%s (actual) not == to %s (expected)' % + (actual, expected)) for opt in opts: - im = mapnik.Image.open('images/support/transparency/aerial_rgba.png') - expected = gen_filepath('aerial_rgba',opt) - actual = os.path.join(tmp_dir,os.path.basename(expected)) + im = mapnik.Image.open( + 'images/support/transparency/aerial_rgba.png') + expected = gen_filepath('aerial_rgba', opt) + actual = os.path.join(tmp_dir, os.path.basename(expected)) if generate or not os.path.exists(expected): - print 'generating expected image %s' % expected - im.save(expected,opt) - im.save(actual,opt) + print('generating expected image', expected) + im.save(expected, opt) + im.save(actual, opt) try: - expected_bytes = mapnik.Image.open(expected).tostring() + expected_bytes = 
mapnik.Image.open(expected).to_string() except RuntimeError: - # this will happen if libweb is old, since it cannot open images created by more recent webp - print 'warning, cannot open webp expected image (your libwebp is likely too old)' + # this will happen if libweb is old, since it cannot open + # images created by more recent webp + print( + 'warning, cannot open webp expected image (your libwebp is likely too old)') continue - if mapnik.Image.open(actual).tostring() != expected_bytes: - fails.append('%s (actual) not == to %s (expected)' % (actual,expected)) + if mapnik.Image.open(actual).to_string() != expected_bytes: + fails.append( + '%s (actual) not == to %s (expected)' % + (actual, expected)) # disabled to avoid failures on ubuntu when using old webp packages - #eq_(fails,[],'\n'+'\n'.join(fails)) - except RuntimeError, e: - print e + # assert fails,[] == '\n'+'\n'.join(fails) + except RuntimeError as e: + print(e) def test_transparency_levels(): try: # create partial transparency image - im = mapnik.Image(256,256) + im = mapnik.Image(256, 256) im.fill(mapnik.Color('rgba(255,255,255,.5)')) c2 = mapnik.Color('rgba(255,255,0,.2)') c3 = mapnik.Color('rgb(0,255,255)') - for y in range(0,im.height()/2): - for x in range(0,im.width()/2): - im.set_pixel(x,y,c2) - for y in range(im.height()/2,im.height()): - for x in range(im.width()/2,im.width()): - im.set_pixel(x,y,c3) + for y in range(0, int(im.height() / 2)): + for x in range(0, int(im.width() / 2)): + im.set_pixel(x, y, c2) + for y in range(int(im.height() / 2), im.height()): + for x in range(int(im.width() / 2), im.width()): + im.set_pixel(x, y, c3) t0 = tmp_dir + 'white0-actual.webp' @@ -145,20 +161,17 @@ def test_transparency_levels(): expected = 'images/support/transparency/white0.webp' if generate or not os.path.exists(expected): im.save('images/support/transparency/white0.webp') - im.save(t0,format) + im.save(t0, format) im_in = mapnik.Image.open(t0) - t0_len = len(im_in.tostring(format)) + t0_len = len(im_in.to_string(format)) try: - expected_bytes = mapnik.Image.open(expected).tostring(format) + expected_bytes = mapnik.Image.open(expected).to_string(format) except RuntimeError: - # this will happen if libweb is old, since it cannot open images created by more recent webp - print 'warning, cannot open webp expected image (your libwebp is likely too old)' + # this will happen if libweb is old, since it cannot open + # images created by more recent webp + print( + 'warning, cannot open webp expected image (your libwebp is likely too old)') return - eq_(t0_len,len(expected_bytes)) - except RuntimeError, e: - print e - - -if __name__ == "__main__": - setup() - exit(run_all(eval(x) for x in dir() if x.startswith("test_"))) + assert t0_len == len(expected_bytes) + except RuntimeError as e: + print(e) diff --git a/test/run_tests.py b/test/run_tests.py index edf797465..4430d3755 100755 --- a/test/run_tests.py +++ b/test/run_tests.py @@ -1,27 +1,34 @@ #!/usr/bin/env python +import getopt +import os import sys + +from python_tests.utilities import TodoPlugin + try: import nose -except ImportError, e: - sys.stderr.write("Unable to run python tests: the third party 'nose' module is required\nTo install 'nose' do:\n\tsudo pip install nose (or on debian systems: apt-get install python-nose): %s\n" % e) +except ImportError as e: + sys.stderr.write( + "Unable to run python tests: the third party 'nose' module is required" + "\nTo install 'nose' do:" + "\n\tsudo pip install nose (or on debian systems: " + "apt-get install python-nose): %s\n" 
% e) sys.exit(1) +else: + from nose.plugins.doctests import Doctest -import mapnik -from python_tests.utilities import TodoPlugin -from nose.plugins.doctests import Doctest - -import nose, sys, os, getopt def usage(): print("test.py -h | --help") print("test.py [-q | -v] [-p | --prefix ]") + def main(): try: opts, args = getopt.getopt(sys.argv[1:], "hvqp:", ["help", "prefix="]) - except getopt.GetoptError,err: + except getopt.GetoptError as err: print(str(err)) usage() sys.exit(2) @@ -49,27 +56,41 @@ def main(): if prefix: # Allow python to find libraries for testing on the buildbot - sys.path.insert(0, os.path.join(prefix, "lib/python%s/site-packages" % sys.version[:3])) + sys.path.insert( + 0, + os.path.join( + prefix, + "lib/python%s/site-packages" % + sys.version[ + :3])) import mapnik if not quiet: print("- mapnik path: %s" % mapnik.__file__) - if hasattr(mapnik,'_mapnik'): - print("- _mapnik.so path: %s" % mapnik._mapnik.__file__) - if hasattr(mapnik,'inputpluginspath'): - print ("- Input plugins path: %s" % mapnik.inputpluginspath) - if os.environ.has_key('MAPNIK_INPUT_PLUGINS_DIRECTORY'): - print ("- MAPNIK_INPUT_PLUGINS_DIRECTORY env: %s" % os.environ.get('MAPNIK_INPUT_PLUGINS_DIRECTORY')) - if hasattr(mapnik,'fontscollectionpath'): + if hasattr(mapnik, '_mapnik'): + print("- _mapnik.so path: %s" % mapnik._mapnik.__file__) + if hasattr(mapnik, 'inputpluginspath'): + print("- Input plugins path: %s" % mapnik.inputpluginspath) + if 'MAPNIK_INPUT_PLUGINS_DIRECTORY' in os.environ: + print("- MAPNIK_INPUT_PLUGINS_DIRECTORY env: %s" % + os.environ.get('MAPNIK_INPUT_PLUGINS_DIRECTORY')) + if hasattr(mapnik, 'fontscollectionpath'): print("- Font path: %s" % mapnik.fontscollectionpath) - if os.environ.has_key('MAPNIK_FONT_DIRECTORY'): - print ("- MAPNIK_FONT_DIRECTORY env: %s" % os.environ.get('MAPNIK_FONT_DIRECTORY')) + if 'MAPNIK_FONT_DIRECTORY' in os.environ: + print( + "- MAPNIK_FONT_DIRECTORY env: %s" % + os.environ.get('MAPNIK_FONT_DIRECTORY')) print('') print("- Running nosetests:") print('') - argv = [__file__, '--exe', '--with-todo', '--with-doctest', '--doctest-tests'] + argv = [ + __file__, + '--exe', + '--with-todo', + '--with-doctest', + '--doctest-tests'] if not quiet: argv.append('-v') @@ -80,7 +101,7 @@ def main(): argv.append('-v') dirname = os.path.dirname(sys.argv[0]) - argv.extend(['-w', os.path.join(dirname,'python_tests')]) + argv.extend(['-w', os.path.join(dirname, 'python_tests')]) if not nose.run(argv=argv, plugins=[TodoPlugin(), Doctest()]): sys.exit(1) diff --git a/test/visual.py b/test/visual.py index 32ad7f420..e4b918ab4 100755 --- a/test/visual.py +++ b/test/visual.py @@ -1,15 +1,18 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +from __future__ import print_function + +import glob import os +import platform +import shutil import sys + import mapnik -import shutil -import platform -import glob -#mapnik.logger.set_severity(mapnik.severity_type.None) -#mapnik.logger.set_severity(mapnik.severity_type.Debug) +# mapnik.logger.set_severity(mapnik.severity_type.None) +# mapnik.logger.set_severity(mapnik.severity_type.Debug) try: import json @@ -21,7 +24,7 @@ defaults = { 'status': True, 'sizes': [(500, 100)], - 'scales':[1.0,2.0], + 'scales': [1.0, 2.0], 'agg': True, 'cairo': mapnik.has_cairo(), 'grid': mapnik.has_grid_renderer() @@ -39,43 +42,46 @@ agg_threshold = 12 grid_threshold = 6 + def render_cairo(m, output, scale_factor): mapnik.render_to_file(m, output, 'ARGB32', scale_factor) # open and re-save as png8 to save space new_im = mapnik.Image.open(output) 
     new_im.save(output, 'png32')
 
+
 def render_grid(m, output, scale_factor):
     grid = mapnik.Grid(m.width, m.height)
     mapnik.render_layer(m, grid, layer=0, scale_factor=scale_factor)
     utf1 = grid.encode('utf', resolution=4)
-    open(output,'wb').write(json.dumps(utf1, indent=1))
+    open(output, 'wb').write(json.dumps(utf1, indent=1).encode())
 
+
 def render_agg(m, output, scale_factor):
     mapnik.render_to_file(m, output, 'png32', scale_factor),
 
 renderers = [
-    { 'name': 'agg',
-      'render': render_agg,
-      'compare': lambda actual, reference: compare(actual, reference, alpha=True),
-      'threshold': agg_threshold,
-      'filetype': 'png',
-      'dir': 'images'
-    },
-    { 'name': 'cairo',
-      'render': render_cairo,
-      'compare': lambda actual, reference: compare(actual, reference, alpha=False),
-      'threshold': cairo_threshold,
-      'filetype': 'png',
-      'dir': 'images'
-    },
-    { 'name': 'grid',
-      'render': render_grid,
-      'compare': lambda actual, reference: compare_grids(actual, reference, alpha=False),
-      'threshold': grid_threshold,
-      'filetype': 'json',
-      'dir': 'grids'
-    }
+    {'name': 'agg',
+     'render': render_agg,
+     'compare': lambda actual, reference: compare(actual, reference, alpha=True),
+     'threshold': agg_threshold,
+     'filetype': 'png',
+     'dir': 'images'
+     },
+    {'name': 'cairo',
+     'render': render_cairo,
+     'compare': lambda actual, reference: compare(actual, reference, alpha=False),
+     'threshold': cairo_threshold,
+     'filetype': 'png',
+     'dir': 'images'
+     },
+    {'name': 'grid',
+     'render': render_grid,
+     'compare': lambda actual, reference: compare_grids(actual, reference, alpha=False),
+     'threshold': grid_threshold,
+     'filetype': 'json',
+     'dir': 'grids'
+     }
 ]
 
 COMPUTE_THRESHOLD = 16
@@ -86,10 +92,13 @@ def render_agg(m, output, scale_factor):
     COMPUTE_THRESHOLD = 2
 
 # compare two images and return number of different pixels
+
+
 def compare(actual, expected, alpha=True):
     im1 = mapnik.Image.open(actual)
     im2 = mapnik.Image.open(expected)
-    return im1.compare(im2,COMPUTE_THRESHOLD, alpha)
+    return im1.compare(im2, COMPUTE_THRESHOLD, alpha)
+
 
 def compare_grids(actual, expected, threshold=0, alpha=True):
     global errors
@@ -112,30 +121,32 @@ def compare_grids(actual, expected, threshold=0, alpha=True):
     height2 = len(grid2)
     if not height1 == height2:
         return 99999999
-    diff = 0;
-    for y in range(0,height1-1):
+    diff = 0
+    for y in range(0, height1 - 1):
         row1 = grid1[y]
         row2 = grid2[y]
-        width = min(len(row1),len(row2))
-        for w in range(0,width):
+        width = min(len(row1), len(row2))
+        for w in range(0, width):
             if row1[w] != row2[w]:
                 diff += 1
     return diff
 
-dirname = os.path.join(os.path.dirname(__file__),'data-visual')
+dirname = os.path.join(os.path.dirname(__file__), 'data-visual')
+
 
 class Reporting:
     DIFF = 1
     NOT_FOUND = 2
     OTHER = 3
     REPLACE = 4
-    def __init__(self, quiet, overwrite_failures = False):
+
+    def __init__(self, quiet, overwrite_failures=False):
        self.quiet = quiet
        self.passed = 0
        self.failed = 0
        self.overwrite_failures = overwrite_failures
-        self.errors = [ #(type, actual, expected, diff, message)
-        ]
+        self.errors = [  # (type, actual, expected, diff, message)
+        ]
 
     def result_fail(self, actual, expected, diff):
         self.failed += 1
@@ -145,7 +156,9 @@ def result_fail(self, actual, expected, diff):
             else:
                 sys.stderr.write('\x1b[31m.\x1b[0m')
         else:
-            print '\x1b[31m✘\x1b[0m (\x1b[34m%u different pixels\x1b[0m)' % diff
+            print(
+                '\x1b[31m✘\x1b[0m (\x1b[34m%u different pixels\x1b[0m)' %
+                diff)
 
         if self.overwrite_failures:
             self.errors.append((self.REPLACE, actual, expected, diff, None))
@@ -163,9 +176,9 @@ def result_pass(self, actual, expected, diff):
             sys.stderr.write('\x1b[32m.\x1b[0m')
         else:
             if platform.uname()[0] == 'Windows':
-                print '\x1b[32m✓\x1b[0m'
+                print('\x1b[32m✓\x1b[0m')
             else:
-                print '✓'
+                print('✓')
 
     def not_found(self, actual, expected):
         self.failed += 1
@@ -173,7 +186,8 @@ def not_found(self, actual, expected):
         if self.quiet:
             sys.stderr.write('\x1b[33m.\x1b[0m')
         else:
-            print '\x1b[33m?\x1b[0m (\x1b[34mReference file not found, creating\x1b[0m)'
+            print(
+                '\x1b[33m?\x1b[0m (\x1b[34mReference file not found, creating\x1b[0m)')
 
         contents = open(actual, 'r').read()
         open(expected, 'wb').write(contents)
@@ -183,16 +197,16 @@ def other_error(self, expected, message):
         if self.quiet:
             sys.stderr.write('\x1b[31m.\x1b[0m')
         else:
-            print '\x1b[31m✘\x1b[0m (\x1b[34m%s\x1b[0m)' % message
+            print('\x1b[31m✘\x1b[0m (\x1b[34m%s\x1b[0m)' % message)
 
-    def make_html_item(self,actual,expected,diff):
+    def make_html_item(self, actual, expected, diff):
         item = '''
-             ''' % (expected,expected,'%')
+             ''' % (expected, expected, '%')
         item += '%s' % (diff)
         item += '''
@@ -200,36 +214,57 @@ def make_html_item(self,actual,expected,diff):
-        ''' % (actual,actual,'%')
+        ''' % (actual, actual, '%')
         return item
 
     def summary(self):
         if len(self.errors) == 0:
-            print '\nAll %s visual tests passed: \x1b[1;32m✓ \x1b[0m' % self.passed
+            print(
+                '\nAll %s visual tests passed: \x1b[1;32m✓ \x1b[0m' %
+                self.passed)
             return 0
         sortable_errors = []
-        print "\nVisual rendering: %s failed / %s passed" % (len(self.errors), self.passed)
+        print("\nVisual rendering: %s failed / %s passed" %
+              (len(self.errors), self.passed))
         for idx, error in enumerate(self.errors):
             if error[0] == self.OTHER:
-                print str(idx+1) + ") \x1b[31mfailure to run test:\x1b[0m %s (\x1b[34m%s\x1b[0m)" % (error[2],error[4])
+                print(str(idx +
+                          1) +
+                      ") \x1b[31mfailure to run test:\x1b[0m %s (\x1b[34m%s\x1b[0m)" %
+                      (error[2], error[4]))
             elif error[0] == self.NOT_FOUND:
-                print str(idx+1) + ") Generating reference image: '%s'" % error[2]
+                print(str(idx + 1) + ") Generating reference image: '%s'" %
+                      error[2])
                 continue
             elif error[0] == self.DIFF:
-                print str(idx+1) + ") \x1b[34m%s different pixels\x1b[0m:\n\t%s (\x1b[31mactual\x1b[0m)\n\t%s (\x1b[32mexpected\x1b[0m)" % (error[3], error[1], error[2])
-                if '.png' in error[1]: # ignore grids
-                    sortable_errors.append((error[3],error))
+                print(
+                    str(
+                        idx +
+                        1) +
+                    ") \x1b[34m%s different pixels\x1b[0m:\n\t%s (\x1b[31mactual\x1b[0m)\n\t%s (\x1b[32mexpected\x1b[0m)" %
+                    (error[3],
+                     error[1],
+                     error[2]))
+                if '.png' in error[1]:  # ignore grids
+                    sortable_errors.append((error[3], error))
             elif error[0] == self.REPLACE:
-                print str(idx+1) + ") \x1b[31mreplaced reference with new version:\x1b[0m %s" % error[2]
+                print(str(idx +
+                          1) +
+                      ") \x1b[31mreplaced reference with new version:\x1b[0m %s" %
+                      error[2])
 
         if len(sortable_errors):
             # drop failure results in folder
-            vdir = os.path.join(visual_output_dir,'visual-test-results')
+            vdir = os.path.join(visual_output_dir, 'visual-test-results')
             if not os.path.exists(vdir):
                 os.makedirs(vdir)
-            html_template = open(os.path.join(dirname,'index.html'),'r').read()
+            html_template = open(
+                os.path.join(
+                    dirname,
+                    'index.html'),
+                'r').read()
             name = 'index.html'
-            failures_realpath = os.path.join(vdir,name)
-            html_out = open(failures_realpath,'w+')
+            failures_realpath = os.path.join(vdir, name)
+            html_out = open(failures_realpath, 'w+')
             sortable_errors.sort(reverse=True)
             html_body = ''
             for item in sortable_errors:
@@ -237,33 +272,39 @@ def summary(self):
                 actual = item[1][1]
                 expected = item[1][2]
                 diff = item[0]
-                actual_new = os.path.join(vdir,os.path.basename(actual))
-                shutil.copy(actual,actual_new)
-                expected_new = os.path.join(vdir,os.path.basename(expected))
-                shutil.copy(expected,expected_new)
-                html_body += self.make_html_item(os.path.relpath(actual_new,vdir),os.path.relpath(expected_new,vdir),diff)
-            html_out.write(html_template.replace('{{RESULTS}}',html_body))
-            print 'View failures by opening %s' % failures_realpath
+                actual_new = os.path.join(vdir, os.path.basename(actual))
+                shutil.copy(actual, actual_new)
+                expected_new = os.path.join(vdir, os.path.basename(expected))
+                shutil.copy(expected, expected_new)
+                html_body += self.make_html_item(
+                    os.path.relpath(
+                        actual_new, vdir), os.path.relpath(
+                        expected_new, vdir), diff)
+            html_out.write(html_template.replace('{{RESULTS}}', html_body))
+            print('View failures by opening %s' % failures_realpath)
         return 1
 
+
 def render(filename, config, scale_factor, reporting):
     m = mapnik.Map(*config['sizes'][0])
     try:
         mapnik.load_map(m, os.path.join(dirname, "styles", filename), True)
-        if not (m.parameters['status'] if ('status' in m.parameters) else config['status']):
-            return
-    except Exception, e:
+        if not (m.parameters['status'] if (
+                'status' in m.parameters) else config['status']):
+            return
+    except Exception as e:
         if 'Could not create datasource' in str(e) \
            or 'Bad connection' in str(e):
             return m
         reporting.other_error(filename, repr(e))
         return m
-    sizes = config['sizes'];
+    sizes = config['sizes']
     if 'sizes' in m.parameters:
-        sizes = [[int(i) for i in size.split(',')] for size in m.parameters['sizes'].split(';')]
+        sizes = [[int(i) for i in size.split(',')]
+                 for size in m.parameters['sizes'].split(';')]
 
     for size in sizes:
         m.width, m.height = size
@@ -279,11 +320,11 @@ def render(filename, config, scale_factor, reporting):
         for renderer in renderers:
             if config.get(renderer['name'], True):
                 expected = os.path.join(dirname, renderer['dir'], '%s-%s-reference.%s' %
-                    (postfix, renderer['name'], renderer['filetype']))
+                                        (postfix, renderer['name'], renderer['filetype']))
                 actual = os.path.join(visual_output_dir, '%s-%s.%s' %
-                    (postfix, renderer['name'], renderer['filetype']))
+                                      (postfix, renderer['name'], renderer['filetype']))
                 if not quiet:
-                    print "\"%s\" with %s..." % (postfix, renderer['name']),
+                    print("\"%s\" with %s..." % (postfix, renderer['name']))
                 try:
                     renderer['render'](m, actual, scale_factor)
                     if not os.path.exists(expected):
@@ -294,28 +335,32 @@ def render(filename, config, scale_factor, reporting):
                         reporting.result_fail(actual, expected, diff)
                     else:
                         reporting.result_pass(actual, expected, diff)
-                except Exception, e:
+                except Exception as e:
                    reporting.other_error(expected, repr(e))
     return m
 
 if __name__ == "__main__":
     if '-q' in sys.argv:
-       quiet = True
-       sys.argv.remove('-q')
+        quiet = True
+        sys.argv.remove('-q')
     else:
-       quiet = False
+        quiet = False
 
     if '--overwrite' in sys.argv:
-       overwrite_failures = True
-       sys.argv.remove('--overwrite')
+        overwrite_failures = True
+        sys.argv.remove('--overwrite')
     else:
-       overwrite_failures = False
+        overwrite_failures = False
 
     files = None
     if len(sys.argv) > 1:
         files = [name + ".xml" for name in sys.argv[1:]]
     else:
-        files = [os.path.basename(file) for file in glob.glob(os.path.join(dirname, "styles/*.xml"))]
+        files = [
+            os.path.basename(file) for file in glob.glob(
+                os.path.join(
+                    dirname,
+                    "styles/*.xml"))]
 
     if not os.path.exists(visual_output_dir):
         os.makedirs(visual_output_dir)