diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 000000000..20469d298 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,8 @@ +[run] +branch = False +source = + import_export +parallel = True +omit = + __init__.py + _version.py diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..0c2f6c8ef --- /dev/null +++ b/.editorconfig @@ -0,0 +1,26 @@ +# https://editorconfig.org/ + +root = true + +[*] +indent_style = space +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true +end_of_line = lf +charset = utf-8 + +# Docstrings and comments use max_line_length = 79 +[*.py] +max_line_length = 88 + +# Use 2 spaces for the HTML files +[*.html] +indent_size = 2 + +# Makefiles always use tabs for indentation +[Makefile] +indent_style = tab + +[*.yml] +indent_size = 2 \ No newline at end of file diff --git a/.flake8 b/.flake8 new file mode 100644 index 000000000..2fd6d1cca --- /dev/null +++ b/.flake8 @@ -0,0 +1,4 @@ +[flake8] +exclude = build,.git,.tox +extend-ignore = E203 +max-line-length = 88 diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..9cfff9d7f --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,7 @@ +# Run this command to always ignore formatting commits in `git blame` +# git config blame.ignoreRevsFile .git-blame-ignore-revs +# https://www.stefanjudis.com/today-i-learned/how-to-exclude-commits-from-git-blame/ + +# Reformat files according to linting rules (#1577) +dc025e1c3dfea741120c11188fc7f8959df79e77 + diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..e9e6c6af4 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,5 @@ +# These are supported funding model platforms + +github: [django-import-export] +open_collective: django-import-export + diff --git a/.github/workflows/django-import-export-ci.yml b/.github/workflows/django-import-export-ci.yml deleted file mode 100644 index 2372ff2d0..000000000 --- 
a/.github/workflows/django-import-export-ci.yml +++ /dev/null @@ -1,139 +0,0 @@ -name: django-import-export CI - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - test: - runs-on: ubuntu-latest - env: - USERNAME: testuser - PASSWD: ${{ secrets.TEST_PASSWD }} - strategy: - max-parallel: 4 - matrix: - db: [ sqlite, postgres, mysql ] - python-version: [ 3.6, 3.7, 3.8, 3.9, "3.10" ] - django-version: [ 2.2, 3.0, 3.1, 3.2, 4.0, main ] - include: - - db: postgres - db_port: 5432 - - db: mysql - db_port: 3306 - exclude: - - django-version: main - python-version: 3.6 - - django-version: main - python-version: 3.7 - - django-version: 4.0 - python-version: 3.6 - - django-version: 4.0 - python-version: 3.7 - - django-version: 2.2 - python-version: "3.10" - - django-version: 3.0 - python-version: "3.10" - - django-version: 3.1 - python-version: "3.10" - - django-version: 3.2 - python-version: "3.10" - services: - mysql: - image: mysql:8.0 - env: - IMPORT_EXPORT_TEST_TYPE: mysql-innodb - IMPORT_EXPORT_MYSQL_USER: ${{ env.TESTUSER }} - IMPORT_EXPORT_MYSQL_PASSWORD: ${{ env.PASSWD }} - MYSQL_USER: ${{ env.TESTUSER }} - MYSQL_PASSWORD: ${{ env.IMPORT_EXPORT_MYSQL_PASSWORD }} - MYSQL_ROOT_PASSWORD: root - MYSQL_DATABASE: import_export - ports: - - 3306:3306 - options: >- - --health-cmd="mysqladmin ping" - --health-interval=10s - --health-timeout=5s - --health-retries=3 - postgres: - image: postgres - env: - IMPORT_EXPORT_TEST_TYPE: postgres - IMPORT_EXPORT_POSTGRESQL_USER: postgres - IMPORT_EXPORT_POSTGRESQL_PASSWORD: ${{ env.PASSWD }} - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: import_export - ports: - - 5432:5432 - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - - steps: - - name: Check out repository code - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ 
matrix.python-version }} - - name: Run isort checks - uses: jamescurtin/isort-action@master - with: - sortPaths: "import_export tests" - configuration: "--check-only" - - name: Install Dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements/base.txt - pip install -r requirements/test.txt - - if: matrix.django-version != 'main' - name: Upgrade Django version (release) - run: | - python -m pip install "Django~=${{ matrix.django-version }}.0" - - if: matrix.django-version == 'main' - name: Upgrade Django version (main) - run: | - python -m pip install "https://github.com/django/django/archive/main.tar.gz" - - name: List versions - run: | - echo "Python ${{ matrix.python-version }} -> Django ${{ matrix.django-version }}" - python --version - echo "Django `django-admin --version`" - - name: Run Tests - env: - DB: ${{ matrix.db }} - DB_HOST: 127.0.0.1 - DB_PORT: ${{ matrix.db_port }} - DB_PASSWORD: ${{ env.PASSWD }} - run: >- - PYTHONPATH=".:tests:$PYTHONPATH" python - -W error::DeprecationWarning -W error::PendingDeprecationWarning - -m coverage run --omit='setup.py,./tests/*,./import_export/locale/*' - --source=. 
tests/manage.py test core --settings= - - name: Upload coverage data to coveralls.io - run: coveralls --service=github - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - COVERALLS_FLAG_NAME: ${{ matrix.db }}-${{ matrix.django-version }}-${{ matrix.python-version }} - COVERALLS_PARALLEL: true - - coveralls: - name: Indicate completion to coveralls.io - needs: test - runs-on: ubuntu-latest - container: python:3-slim - steps: - - name: Finished - run: | - pip3 install --upgrade coveralls - coveralls --service=github --finish - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..6a5dab36f --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,75 @@ +name: Release + +on: + release: + types: + - published + +jobs: + build: + name: Build Distribution 📦 + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Install pypa/build + run: >- + SETUPTOOLS_SCM_DEBUG=1 + python -m + pip install + build + --user + - name: Build a binary wheel and a source tarball + run: python -m build + - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: python-package-distributions + path: dist/ + + publish-to-pypi: + name: >- + Publish Python 🐍 distribution 📦 to PyPI + needs: + - build + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/django-import-export + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution 📦 to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + + publish-to-testpypi: + name: Publish Python 🐍 distribution 📦 to TestPyPI + needs: + - build + runs-on: ubuntu-latest + 
environment: + name: testpypi + url: https://test.pypi.org/p/django-import-export + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution 📦 to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..165408446 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,91 @@ +name: test + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + test: + runs-on: ubuntu-latest + env: + DB_NAME: import_export + IMPORT_EXPORT_POSTGRESQL_USER: postgres + IMPORT_EXPORT_POSTGRESQL_PASSWORD: somepass + IMPORT_EXPORT_MYSQL_USER: root + IMPORT_EXPORT_MYSQL_PASSWORD: root + strategy: + fail-fast: true + matrix: + python-version: + - '3.9' + - '3.10' + - '3.11' + - '3.12' + - '3.13' + services: + postgres: + image: postgres + env: + POSTGRES_USER: ${{ env.IMPORT_EXPORT_POSTGRESQL_USER }} + POSTGRES_PASSWORD: ${{ env.IMPORT_EXPORT_POSTGRESQL_PASSWORD }} + POSTGRES_DB: ${{ env.DB_NAME }} + ports: + - 5432:5432 + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: + - name: Set up MySQL + run: > + sudo /etc/init.d/mysql start + + mysql -e 'CREATE DATABASE ${{ env.DB_NAME }};' + -u${{ env.IMPORT_EXPORT_MYSQL_USER }} + -p${{ env.IMPORT_EXPORT_MYSQL_PASSWORD }} + - name: Check out repository code + uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install Dependencies + run: | + python -m pip install --upgrade pip + pip install tox coverage coveralls + - name: Run tox targets for ${{ matrix.python-version }} (sqlite) + run: tox 
run -f py$(echo ${{ matrix.python-version }} | tr -d .) + - name: Run tox targets for ${{ matrix.python-version }} (postgres) + run: tox run -f py$(echo ${{ matrix.python-version }} | tr -d .) + env: + IMPORT_EXPORT_TEST_TYPE: postgres + - name: Run tox targets for ${{ matrix.python-version }} (mysql) + run: tox run -f py$(echo ${{ matrix.python-version }} | tr -d .) + env: + IMPORT_EXPORT_TEST_TYPE: mysql-innodb + - name: Combine test coverage + run: coverage combine + - name: Upload coverage data to coveralls.io + run: coveralls --service=github + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COVERALLS_FLAG_NAME: ${{ matrix.python-version }} + COVERALLS_PARALLEL: true + + coveralls: + name: Indicate completion to coveralls.io + needs: test + runs-on: ubuntu-latest + container: python:3-slim + steps: + - name: Finished + run: | + pip3 install --upgrade coveralls + coveralls --service=github --finish + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 18322dcc8..29c64046e 100644 --- a/.gitignore +++ b/.gitignore @@ -9,8 +9,16 @@ dist/ *.egg-info/ .tox/ .idea/ +.venv/ *.python-version -.coverage +.coverage* *.sw[po] +.DS_Store + +# IDE support +.vscode tests/database.db + +# generated by setuptools-scm +import_export/_version.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..b398a5271 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,25 @@ +repos: + - repo: https://github.com/adamchainz/django-upgrade + rev: 1.23.1 + hooks: + - id: django-upgrade + args: [--target-version, "4.2"] + - repo: https://github.com/asottile/pyupgrade + rev: v3.19.1 + hooks: + - id: pyupgrade + args: [--py39-plus] + - repo: https://github.com/psf/black-pre-commit-mirror + rev: 25.1.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/isort + rev: 6.0.0 + hooks: + - id: isort + - repo: https://github.com/PyCQA/flake8 + rev: 7.1.2 + hooks: + - id: flake8 + additional_dependencies: 
+ - flake8-comprehensions==3.14.0 diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000..599b247bb --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,16 @@ +version: 2 + +build: + os: "ubuntu-22.04" + tools: + python: "3.11" + +sphinx: + configuration: docs/conf.py + +python: + install: + - method: pip + path: . + extra_requirements: + - docs diff --git a/AUTHORS b/AUTHORS index b725be70b..7840acd8b 100644 --- a/AUTHORS +++ b/AUTHORS @@ -121,3 +121,42 @@ The following is a list of much appreciated contributors: * striveforbest (Alex Zagoro) * josx (José Luis Di Biase) * Jan Rydzewski +* rpsands (Ryan P. Sands) +* 2ykwang (Yeongkwang Yang) +* KamilRizatdinov (Kamil Rizatdinov) +* Mark Walker +* shimakaze-git +* frgmt +* vanschelven (Klaas van Schelven) +* HaPyTeX (Willem Van Onsem) +* nikhaldi (Nik Haldimann) +* TheRealVizard (Eduardo Leyva) +* 1gni5 (Jules Ducange) +* mpasternak (Michał Pasternak) +* nikatlas (Nikos Atlas) +* cocorocho (Erkan Çoban) +* bdnettleton (Brian Nettleton) +* Ptosiek (Antonin) +* samupl (Jakub Szafrański) +* smunoz-ml (Santiago Muñoz) +* carlosal0ns0 (Carlos Alonso) +* travenin (Lauri Virtanen) +* christophehenry (Christophe Henry) +* bgelov (Oleg Belov) +* EricOuma (Eric Ouma) +* ZibingZhang (Zibing Zhang) +* Glay00 (Gleb Gorelov) +* PrashansaChaudhary (Prashansa Chaudhary) +* Vedang Barhate (bvedang) +* RobTilton (Robert Tilton) +* ulliholtgrave +* mishka251 (Mikhail Belov) +* jhthompson (Jeremy Thompson) +* thisisumurzakov (Akbarbek Umurzakov) +* roharvey (Rob Harvey) +* RenDelaCruz (Ren de la Cruz) +* 19greg96 (Gergely Karz) +* AyushDharDubey +* dahvo (David Mark Awad) +* jurrian +* merwok diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3ec1b5c33..0d688621c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,78 +1,5 @@ -Contributing -============ +# Contributing -django-import-export is open-source and, as such, grows (or shrinks) & improves in part -due to the community. 
Below are some guidelines on how to help with the project. +Thanks for the interest! See the [contributor documentation][contribute] to get started. -By contributing you agree to abide by the [Contributor Code of Conduct][coc] - -Philosophy ----------- - -* django-import-export is BSD-licensed. All contributed code must be either - * the original work of the author, contributed under the BSD, or... - * work taken from another project released under a BSD-compatible license. -* GPL'd (or similar) works are not eligible for inclusion. -* django-import-export's git master branch should always be stable, production-ready & - passing all tests. - - -Guidelines For Reporting An Issue/Feature ------------------------------------------ - -So you've found a bug or have a great idea for a feature. Here's the steps you -should take to help get it added/fixed in django-import-export: - -* First, check to see if there's an existing issue/pull request for the - bug/feature. All issues are at https://github.com/django-import-export/django-import-export/issues - and pull reqs are at https://github.com/django-import-export/django-import-export/pulls. -* If there isn't one there, please file an issue. The ideal report includes: - * A description of the problem/suggestion. - * How to recreate the bug. - * If relevant, including the versions of your: - * Python interpreter - * Django - * tablib version - * django-import-export - * Optionally of the other dependencies involved - * Ideally, creating a pull request with a (failing) test case demonstrating - what's wrong. This makes it easy for us to reproduce & fix the problem. - - -Guidelines For Contributing Code --------------------------------- - -If you're ready to take the plunge & contribute back some code/docs, the -process should look like: - -* Fork the project on GitHub into your own account. -* Clone your copy of django-import-export. -* Make a new branch in git & commit your changes there. 
-* Push your new branch up to GitHub. -* Again, ensure there isn't already an issue or pull request out there on it. - If there is & you feel you have a better fix, please take note of the issue - number & mention it in your pull request. -* Create a new pull request (based on your branch), including what the - problem/feature is, versions of your software & referencing any related - issues/pull requests. - -In order to be merged into django-import-export, contributions must have the following: - -* A solid patch that: - * is clear. - * works across all supported versions of Python/Django. - * follows the existing style of the code base (mostly PEP-8). - * comments included as needed to explain why the code functions as it does -* A test case that demonstrates the previous flaw that now passes - with the included patch. -* If it adds/changes a public API, it must also include documentation - for those changes. -* Must be appropriately licensed (see [Philosophy](#philosophy)). -* Adds yourself to the AUTHORS file. - -If your contribution lacks any of these things, they will have to be added -by a core contributor before being merged into django-import-export proper, which may take -substantial time for the all-volunteer team to get to. 
- - -[coc]: https://github.com/django-import-export/django-import-export/blob/master/CODE_OF_CONDUCT.md +[contribute]: https://django-import-export.readthedocs.io/en/latest/contributing.html diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index cbdf17f7e..000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,6 +0,0 @@ -include LICENSE -include AUTHORS -include README.rst -recursive-include import_export/templates * -recursive-include import_export/locale * -recursive-include import_export/static * diff --git a/Makefile b/Makefile index 75c351c95..5c4f480e0 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ -.PHONY: clean-pyc clean-build release docs help +.PHONY: clean-pyc clean-build docs help .PHONY: lint test coverage test-codecov .DEFAULT_GOAL := help -RUN_TEST_COMMAND=PYTHONPATH=".:tests:${PYTHONPATH}" django-admin test core --settings=settings +RUN_TEST_COMMAND=PYTHONPATH=".:tests:${PYTHONPATH}" python -W error -m django test core --settings=settings help: @grep '^[a-zA-Z]' $(MAKEFILE_LIST) | sort | awk -F ':.*?## ' 'NF==2 {printf "\033[36m %-25s\033[0m %s\n", $$1, $$2}' @@ -11,6 +11,8 @@ clean-build: ## remove build artifacts rm -fr build/ rm -fr dist/ rm -fr *.egg-info + rm -f import_export/_version.py + rm -fr .coverage.* clean-pyc: ## remove Python file artifacts find . -name '*.pyc' -exec rm -f {} + @@ -22,39 +24,36 @@ clean-tests: ## remove pytest artifacts rm -fr htmlcov/ rm -fr django-import-export/ -lint: ## check style with isort - isort --check-only . - -test: ## run tests quickly with the default Python +test: ## run tests with the default Python $(RUN_TEST_COMMAND) +testp: ## run tests in parallel with the default Python + $(RUN_TEST_COMMAND) --parallel + messages: ## generate locale file translations - cd import_export && django-admin makemessages -a && django-admin compilemessages && cd .. + cd import_export && django-admin makemessages --add-location=file -a && django-admin compilemessages && cd .. 
coverage: ## generates codecov report - coverage run --omit='setup.py,tests/*' --source=. tests/manage.py test core --settings= + coverage run tests/manage.py test core + coverage combine coverage report sdist: clean ## package python setup.py sdist ls -l dist -release: clean install-deploy-requirements sdist ## package and upload a release - fullrelease - install-base-requirements: ## install package requirements - pip install -r requirements/base.txt + pip install . install-test-requirements: ## install requirements for testing - pip install -r requirements/test.txt - -install-deploy-requirements: ## install requirements for deployment - pip install -r requirements/deploy.txt + pip install .[tests] install-docs-requirements: ## install requirements for docs - pip install -r requirements/docs.txt + pip install --editable .[docs] -install-requirements: install-base-requirements install-test-requirements install-deploy-requirements install-docs-requirements +install-requirements: install-base-requirements install-test-requirements install-docs-requirements -build-html-doc: ## builds the project documentation in HTML format +## builds the project documentation in HTML format +## run `pip install -e .` first if running locally +build-html-doc: DJANGO_SETTINGS_MODULE=tests.settings make html -C docs diff --git a/README.rst b/README.rst index ed1977334..d0d45d314 100644 --- a/README.rst +++ b/README.rst @@ -2,74 +2,128 @@ django-import-export ==================== -.. image:: https://github.com/django-import-export/django-import-export/actions/workflows/django-import-export-ci.yml/badge.svg - :target: https://github.com/django-import-export/django-import-export/actions/workflows/django-import-export-ci.yml +.. |build| image:: https://github.com/django-import-export/django-import-export/actions/workflows/release.yml/badge.svg + :target: https://github.com/django-import-export/django-import-export/actions/workflows/release.yml :alt: Build status on Github -.. 
image:: https://coveralls.io/repos/github/django-import-export/django-import-export/badge.svg?branch=main +.. |coveralls| image:: https://coveralls.io/repos/github/django-import-export/django-import-export/badge.svg?branch=main :target: https://coveralls.io/github/django-import-export/django-import-export?branch=main -.. image:: https://img.shields.io/pypi/v/django-import-export.svg +.. |pypi| image:: https://img.shields.io/pypi/v/django-import-export.svg :target: https://pypi.org/project/django-import-export/ :alt: Current version on PyPi -.. image:: http://readthedocs.org/projects/django-import-export/badge/?version=stable +.. |docs| image:: http://readthedocs.org/projects/django-import-export/badge/?version=stable :target: https://django-import-export.readthedocs.io/en/stable/ :alt: Documentation -.. image:: https://img.shields.io/pypi/pyversions/django-import-export +.. |pyver| image:: https://img.shields.io/pypi/pyversions/django-import-export :alt: PyPI - Python Version -.. image:: https://img.shields.io/pypi/djversions/django-import-export +.. |djangover| image:: https://img.shields.io/pypi/djversions/django-import-export :alt: PyPI - Django Version -django-import-export is a Django application and library for importing -and exporting data with included admin integration. +.. |downloads| image:: https://static.pepy.tech/personalized-badge/django-import-export?period=month&units=international_system&left_color=black&right_color=blue&left_text=Downloads/month + :target: https://pepy.tech/project/django-import-export + +.. |xfollow| image:: https://img.shields.io/twitter/url/https/twitter.com/django_import.svg?style=social&label=Follow%20%40django_import + :alt: Follow us on X + :target: https://twitter.com/django_import + +.. 
|discord| image:: https://img.shields.io/discord/1240294048653119508?style=flat + :alt: Discord + +|build| |coveralls| |pypi| |docs| |pyver| |djangover| |downloads| |xfollow| |discord| + +Introduction +============ + +Straightforward, reliable and comprehensive file import / export for your Django application. + +*django-import-export* is an application and library which lets you manage import / export from / to a variety of sources (csv, xlsx, json etc). + +Can be run programmatically, or with optional integration with the Django Admin site: + +.. + source of this video uploaded to this issue comment: + https://github.com/django-import-export/django-import-export/pull/1833#issuecomment-2118777440 + +https://github.com/django-import-export/django-import-export/assets/6249838/ab56d8ba-c307-4bdf-8fa9-225669c72b37 + +`Screenshots `_ + +Features +======== + +* Import / export via `Admin UI Integration `_ or `programmatically `_ +* Import to and from a variety of file formats (csv, json, xlsx, pandas, HTML, YAML... 
and anything else that `tablib `_ supports) +* `Preview `_ data before importing in Admin UI +* Support for `bulk import `_ +* Handles `CRUD (and 'skip') operations during import `_ +* Flexible handling of `foreign key `_ relationships +* `Many-to-many relationship `_ support +* `Validation `_ of imported data +* Define custom `transformations `_ for exported data +* Import / export the same model instance as `different views `_ +* Export using `natural keys `__ for portability between environments +* `Select items for export `_ via the Admin UI object list +* `Select fields for export `_ via the export form +* `Export single object instances `_ +* Use `django permissions `_ to control import / export authorization +* Internationalization support +* Based on `tablib `__ +* Support for MySQL / PostgreSQL / SQLite +* Extensible - `add custom logic to control import / export `_ +* Handle import from various character encodings +* `Celery `_ integration +* Test locally with `Docker `_ +* Comprehensive `documentation `__ +* `Extensible API `_ +* test coverage :100: +* Supports dark mode :rocket: + +Example use-cases +================= + +*django-import-export* is designed to be extensible and can be used to support a variety of operations. +Here are some examples of how it has been used in the wild: + +* Configure external cron jobs to run an import or export at set times +* Use `permissions `_ to define a subset of users able to import and export project data +* Safely update project reference data by importing from version controlled csv +* Create portable data to transfer between environments using `natural keys `_ +* Manage user access to an application by importing externally version controlled auth user lists +* Add `hooks `_ to anonymize data on export +* `Modify import / export UI forms `_ to add dynamic filtering on import / export. 
+* Build a migration layer between platforms, for example take a `Wordpress `_ export and import to `Wagtail `_ + +Getting started +=============== + +* `Installation `_ +* `Getting started `__ +* `Example application `_ + +Help and support +================ + +* `Documentation `_ +* `FAQ `_ +* `Getting help `_ +* `Contributing `_ +* Become a `sponsor `_ +* Join our `discord `_ +* Tutorial videos on `YouTube `_ +* `Raise a security issue `_ + +Commercial support +================== + +Commercial support is provided by `Bellaport Systems Ltd `_ + +Releases +======== + +* `Release notes `_ +* `Changelog `_ -Features: - -* support multiple formats (Excel, CSV, JSON, ... - and everything else that `tablib`_ supports) - -* admin integration for importing - -* preview import changes - -* admin integration for exporting - -* export data respecting admin filters - -.. image:: docs/_static/images/django-import-export-change.png - - -* Documentation: https://django-import-export.readthedocs.io/en/stable/ -* GitHub: https://github.com/django-import-export/django-import-export/ -* Free software: BSD license -* PyPI: https://pypi.org/project/django-import-export/ - -Example app ------------ - -To run the demo app:: - - cd tests - ./manage.py makemigrations - ./manage.py migrate - ./manage.py createsuperuser - ./manage.py loaddata category book - ./manage.py runserver - -Contribute ----------- - -If you'd like to contribute, simply fork `the repository`_, commit your -changes to the **develop** branch (or branch off of it), and send a pull -request. Make sure you add yourself to AUTHORS_. - -As most projects, we try to follow PEP8_ as closely as possible. Please bear -in mind that most pull requests will be rejected without proper unit testing. - -.. _`PEP8`: https://www.python.org/dev/peps/pep-0008/ -.. _`tablib`: https://github.com/jazzband/tablib -.. _`the repository`: https://github.com/django-import-export/django-import-export/ -.. 
_AUTHORS: https://github.com/django-import-export/django-import-export/blob/master/AUTHORS diff --git a/RELEASE.md b/RELEASE.md new file mode 100644 index 000000000..8b9517110 --- /dev/null +++ b/RELEASE.md @@ -0,0 +1,76 @@ +## Release process + +Pull requests automatically have [pre-commit](https://pre-commit.com/) checks applied via +the [pre-commit.ci](https://pre-commit.ci/) Github application. +These checks will run automatically once the Github application is installed. +The checks run off the `.pre-commit-config.yaml` file, and that file can be used to apply +additional config to the CI process. + +### Pre-release + +Ensure that `changelog.rst` is up-to-date with the correct version number and release date. + +It's sensible to perform a clean installation of the package and ensure the server runs ok. +This can avoid issues with broken imports which may not have been picked up by integration tests. + +``` +python -m venv venv +pip install django-import-export +tests/manage.py runserver +``` + +Now browse http://localhost:8000 and test that the site runs ok. + +### Compile translations + +- `make messages` is intended to be run now to keep the translation files up-to-date. + - Run this if there have been any translations updates for the release. It is recommended to run this prior to any minor release. + - This creates updates to all translation files so there is no need to commit these unless there have been any translation changes. + - If 'no module named settings' error is seen, try unsetting `DJANGO_SETTINGS_MODULE` environment variable. + +### Perform the release + +To create a new published release, follow the instructions [here](https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository). +Ensure you create the new tag to correspond with the release as required. 
+ +Always build the release from `main` because ReadTheDocs builds documentation from `main`, +so if another branch is used, then the documentation will be incomplete. + +1. Go to the [Releases](https://github.com/django-import-export/django-import-export/releases) page +2. Click 'Draft a new release' +3. Choose or create a new tag +4. Choose the desired branch (if not `main`) +5. Check 'Set as a pre-release' or 'Set as the latest release' as appropriate +6. Generate release notes if desired. +7. Click 'Publish release' + +The `release` github workflow will run and publish the release binaries to both test.pypi.org and pypi.org. + +### Check readthedocs + +[readthedocs](https://readthedocs.org/projects/django-import-export/) integration is used to publish documentation. +The webhook endpoint on readthedocs is configured using +[these instructions](https://docs.readthedocs.io/en/latest/guides/setup/git-repo-manual.html). + +This is implemented using a Webhook defined in the Github repo (Settings / Webhooks). + +readthedocs should be checked after each release to ensure that the docs have built correctly. +Login to [readthedocs.org](https://readthedocs.org) to check that the build ran OK (click on 'Builds' tab). + +For pre-releases, the release version has to be activated via the readthedocs UI before it can be built. + +### Troubleshooting + +The build can fail on 'publish to PyPI' with errors such as: + +``` +`long_description` has syntax errors in markup and would not be rendered on PyPI. +``` + +This is because the README.rst contains syntax errors and cannot be rendered. You can check this with: + +``` +pip install readme_renderer +python setup.py check -r -s +``` +If there are duplicate target names, you can correct this with [underscores](https://github.com/sphinx-doc/sphinx/issues/3921#issuecomment-315581557). 
diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..a90173096 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +## Reporting a Vulnerability + + **Please report security issues by emailing djangoimportexport@gmail.com**. + + The project maintainers will then work with you to resolve any issues where required, prior to any public disclosure. diff --git a/docs/_static/images/change-form-export.png b/docs/_static/images/change-form-export.png new file mode 100644 index 000000000..6912c4299 Binary files /dev/null and b/docs/_static/images/change-form-export.png differ diff --git a/docs/_static/images/custom-export-form.png b/docs/_static/images/custom-export-form.png new file mode 100644 index 000000000..b358966da Binary files /dev/null and b/docs/_static/images/custom-export-form.png differ diff --git a/docs/_static/images/custom-import-form.png b/docs/_static/images/custom-import-form.png new file mode 100644 index 000000000..206c3b22f Binary files /dev/null and b/docs/_static/images/custom-import-form.png differ diff --git a/docs/_static/images/date-widget-validation-error.png b/docs/_static/images/date-widget-validation-error.png new file mode 100644 index 000000000..00239b3c5 Binary files /dev/null and b/docs/_static/images/date-widget-validation-error.png differ diff --git a/docs/_static/images/django-import-export-action.png b/docs/_static/images/django-import-export-action.png deleted file mode 100644 index 701652a62..000000000 Binary files a/docs/_static/images/django-import-export-action.png and /dev/null differ diff --git a/docs/_static/images/django-import-export-export-confirm.png b/docs/_static/images/django-import-export-export-confirm.png new file mode 100644 index 000000000..3375d1d67 Binary files /dev/null and b/docs/_static/images/django-import-export-export-confirm.png differ diff --git a/docs/_static/images/export-button.png b/docs/_static/images/export-button.png new file mode 100644 index 
000000000..4635aa715 Binary files /dev/null and b/docs/_static/images/export-button.png differ diff --git a/docs/_static/images/export_workflow.svg b/docs/_static/images/export_workflow.svg new file mode 100644 index 000000000..81055ab44 --- /dev/null +++ b/docs/_static/images/export_workflow.svg @@ -0,0 +1 @@ +%23%20see%20import_workflow.txt%0A%0Aparticipant%20Resource%0Aparticipant%20Field%0Aparticipant%20Widget%0Aparticipant%20tablib.Dataset%0A%0AResource-%3EResource%3A%22%22export(queryset%3DNone%2C%20%5C*%5C*kwargs)%22%22%0Aactivate%20Resource%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22before_export(queryset%3DNone%2C%20%5C*%5C*kwargs)%22%22%0A%0Anote%20over%20Resource%3A%20A%20Queryset%20instance%20can%20be%20passed%20into%20export().%5CnIf%20no%20Queryset%20is%20passed%2C%20get_queryset()%20is%20called.%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22get_queryset()%22%22%0Aactivate%20Resource%20%23lightblue%0AResource%3C--Resource%3A%22%22Queryset%22%22%0Adeactivate%20Resource%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22filter_export(queryset%2C%20%5C*%5C*kwargs)%22%22%0Aactivate%20Resource%20%23lightblue%0AResource%3C--Resource%3A%22%22Queryset%22%22%0Adeactivate%20Resource%0A%0Aloop%20%23pink%20each%20row%20in%20Queryset%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22export_resource(instance)%22%22%0Aactivate%20Resource%20%23lightblue%0A%0Aloop%20%23green%20each%20field%20in%20export%20field%20list%0AResource-%3EResource%3A%22%22export_field(field%2C%20instance)%22%22%0Aactivate%20Resource%20%23lightpink%0AResource-%3EField%3A%22%22export(instance)%22%22%0Aactivate%20Field%20%23lightblue%0Anote%20over%20Field%3A%20An%20optional%20callable%20can%20be%20defined%20instead%20of%20export().%5Cn%20See%20'dehydrate'%20methods%20in%20docs.%0AField-%3EField%3A%22%22get_value(instance)%22%22%0Aactivate%20Field%20%23lightpink%0Anote%20over%20Field%3A%20Get%20the%20field's%20value%20from%20the%20instance
.%0AField%3C--Field%3A%22%22%3C%3Cvalue%3E%3E%22%22%0AField-%3EWidget%3A%22%22render(value)%22%22%0Aactivate%20Widget%0Anote%20over%20Widget%3A%20Format%20field%20value%20into%20a%5Cnstring%20or%20value%20as%20required.%0AField%3C--Widget%3A%22%22%3C%3Cvalue%3E%3E%0Adeactivate%20Widget%0Adeactivate%20Field%0AResource%3C--Field%3A%22%22%3C%3Cvalue%3E%3E%22%22%0Adeactivate%20Field%0Adeactivate%20Resource%0Aend%0Adeactivate%20Resource%0A%0AResource-%3Etablib.Dataset%3A%22%22append()%22%22%0Aend%0A%0AResource%3C--Resource%3A%22%22%3C%3CDataset%3E%3E%22%22%0Adeactivate%20Resource%0A%0A%0AResourceFieldWidgettablib.Datasetexport(queryset=None, **kwargs)before_export(queryset=None, **kwargs)A Queryset instance can be passed into export().If no Queryset is passed, get_queryset() is called.get_queryset()Querysetfilter_export(queryset, **kwargs)Querysetexport_resource(instance)export_field(field, instance)export(instance)An optional callable can be defined instead of export(). See 'dehydrate' methods in docs.get_value(instance)Get the field's value from the instance.<<value>>render(value)Format field value into astring or value as required.<<value>><<value>>append()<<Dataset>>loop[each row in Queryset]loop[each field in export field list] \ No newline at end of file diff --git a/docs/_static/images/import-button.png b/docs/_static/images/import-button.png new file mode 100644 index 000000000..5c9c08f14 Binary files /dev/null and b/docs/_static/images/import-button.png differ diff --git a/docs/_static/images/import_workflow.svg b/docs/_static/images/import_workflow.svg new file mode 100644 index 000000000..032a6cf39 --- /dev/null +++ b/docs/_static/images/import_workflow.svg @@ -0,0 +1 @@ 
+%23%20sequencediagram.org%0A%0Aparticipant%20Resource%0Aparticipant%20Result%0Aparticipant%20RowResult%0Aparticipant%20InstanceLoader%0Aparticipant%20Field%0Aparticipant%20Widget%0A%0AResource-%3EResource%3A%22%22import_data(data%2C%20%5C*%5C*kwargs)%22%22%0Aactivate%20Resource%0AResource-%3EResult%3A%22%22__init__()%22%22%0Aactivate%20Result%0AResource%3C--Result%3A%22%22Result%22%22%0Adeactivate%20Result%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22before_import(dataset%2C%20%5C*%5C*kwargs)%22%22%0A%0Aloop%20%23pink%20each%20row%20in%20dataset%0AResource-%3EResource%3A%22%22import_row(row%2C%20instance_loader%2C%20%5C*%5C*kwargs)%22%22%0Aactivate%20Resource%20%23lightblue%0AResource-%3ERowResult%3A%22%22__init__()%22%22%0Aactivate%20RowResult%0AResource%3C--RowResult%3A%22%22RowResult%22%22%0Adeactivate%20RowResult%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22before_import_row(row%2C%20%5C*%5C*kwargs)%22%22%0A%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22get_or_init_instance(instance_loader%2C%20%5Cn%20%20%20%20row)%22%22%0Aactivate%20Resource%20%23lightgrey%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22get_instance(instance_loader%2C%20row)%22%22%0Aactivate%20Resource%20%23lightgreen%0AResource-%3EInstanceLoader%3A%22%22get_instance(row)%22%22%0Aactivate%20InstanceLoader%0Anote%20over%20InstanceLoader%3A%20Existing%20Instance%20is%20returned%20if%20exists%2C%5Cn%20otherwise%20a%20new%20Instance%20is%20created.%0A%0AResource%3C--InstanceLoader%3A%22%22Instance%22%22%0Adeactivate%20InstanceLoader%0AResource--%3EResource%3A%22%22Instance%22%22%0Adeactivate%20Resource%0AResource--%3EResource%3A%22%22Instance%2C%20bool%22%22%0Adeactivate%20Resource%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22after_init_instance(instance%2C%20new%2C%20row%20%5Cn%20%20%20%5C*%5C*kwargs)%22%22%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22for_delete(row%2C%20instance)%22%22%0Aactivate%20
Resource%20%23lightgrey%0Anote%20over%20Resource%3A%20If%20True%2C%20row%20is%20deleted.%0AResource--%3EResource%3A%22%22bool%22%22%0A%0Adeactivate%20Resource%0A%0AResource-%3EResource%3A%22%22import_instance(instance%2C%20row%2C%20%5C*%5C*kwargs)%22%22%0Aactivate%20Resource%20%23lightgrey%0A%0Aloop%20%23green%20each%20field%20in%20row%0AResource-%3EField%3A%22%22save(instance%2C%20row%2C%20is_m2m%2C%20%5C*%5C*kwargs)%22%22%0Anote%20over%20Field%3A%20save%20logic%20determines%20the%20correct%20value%5Cnand%20sets%20attribute%20on%20instance.%0AField-%3EField%3A%22%22clean(row%2C%20%5C*%5C*kwargs)%22%22%0Aactivate%20Field%0AField-%3EWidget%3A%22%22clean(value%2C%20row%2C%20%5C*%5C*kwargs)%22%22%0Aactivate%20Widget%0AField%3C--Widget%3A%22%22value%22%22%0Adeactivate%20Widget%0Adeactivate%20Field%0Aend%0A%0Adeactivate%20Resource%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22skip_row(instance%2C%20original%2C%20row%2C%20%5Cn%20%20%20%20import_validation_errors)%22%22%0Aactivate%20Resource%20%23lightgrey%0Anote%20over%20Resource%3A%20If%20True%2C%20row%20is%20skipped.%0AResource--%3EResource%3A%22%22bool%22%22%0Adeactivate%20Resource%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22validate_instance(instance%2C%20%5Cn%20%20%20import_validation_errors)%22%22%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22save_instance(instance%2C%20row%2C%20new%2C%20%5Cn%20%20%20%20%5C*%5C*kwargs)%22%22%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22save_m2m(instance%2C%20row%2C%20%5C*%5C*kwargs)%22%22%0A%0AResource-%3EResource%3A%3Cbackground%3A%23yellow%3E%22%22after_import_row(row%2C%20row_result%2C%20%5Cn%20%20%20%20%5C*%5C*kwargs)%22%22%0A%0A%0AResource--%3EResource%3A%22%22RowResult%22%22%0Adeactivate%20Resource%0Aend%0A%0AResource%3C--Resource%3A%22%22Result%22%22%0Adeactivate%20Resource%0AResourceResultRowResultInstanceLoaderFieldWidgetimport_data(data, **kwargs)__init__()Resultbefore_import(dataset, **kwargs)import_row(row, 
instance_loader, **kwargs)__init__()RowResultbefore_import_row(row, **kwargs)get_or_init_instance(instance_loader,     row)get_instance(instance_loader, row)get_instance(row)Existing Instance is returned if exists, otherwise a new Instance is created.InstanceInstanceInstance, boolafter_init_instance(instance, new, row    **kwargs)for_delete(row, instance)If True, row is deleted.boolimport_instance(instance, row, **kwargs)save(instance, row, is_m2m, **kwargs)save logic determines the correct valueand sets attribute on instance.clean(row, **kwargs)clean(value, row, **kwargs)valueskip_row(instance, original, row,     import_validation_errors)If True, row is skipped.boolvalidate_instance(instance,    import_validation_errors)save_instance(instance, row, new,     **kwargs)save_m2m(instance, row, **kwargs)after_import_row(row, row_result,     **kwargs)RowResultResultloop[each row in dataset]loop[each field in row] \ No newline at end of file diff --git a/docs/_static/images/non-field-specific-validation-error.png b/docs/_static/images/non-field-specific-validation-error.png new file mode 100644 index 000000000..a476f1dde Binary files /dev/null and b/docs/_static/images/non-field-specific-validation-error.png differ diff --git a/docs/_static/images/screenshots/confirm-import.png b/docs/_static/images/screenshots/confirm-import.png new file mode 100644 index 000000000..6cdf1a182 Binary files /dev/null and b/docs/_static/images/screenshots/confirm-import.png differ diff --git a/docs/_static/images/screenshots/export-form.png b/docs/_static/images/screenshots/export-form.png new file mode 100644 index 000000000..cd5f7d6a1 Binary files /dev/null and b/docs/_static/images/screenshots/export-form.png differ diff --git a/docs/_static/images/screenshots/export-selected-action.png b/docs/_static/images/screenshots/export-selected-action.png new file mode 100644 index 000000000..93def7e23 Binary files /dev/null and b/docs/_static/images/screenshots/export-selected-action.png differ 
diff --git a/docs/_static/images/screenshots/import-complete.png b/docs/_static/images/screenshots/import-complete.png new file mode 100644 index 000000000..c7e2b8b8c Binary files /dev/null and b/docs/_static/images/screenshots/import-complete.png differ diff --git a/docs/_static/images/screenshots/import-form.png b/docs/_static/images/screenshots/import-form.png new file mode 100644 index 000000000..11899e1e0 Binary files /dev/null and b/docs/_static/images/screenshots/import-form.png differ diff --git a/docs/_static/images/screenshots/import-update-with-authors.png b/docs/_static/images/screenshots/import-update-with-authors.png new file mode 100644 index 000000000..8927bf4ab Binary files /dev/null and b/docs/_static/images/screenshots/import-update-with-authors.png differ diff --git a/docs/_static/images/select-for-export.png b/docs/_static/images/select-for-export.png new file mode 100644 index 000000000..49984086f Binary files /dev/null and b/docs/_static/images/select-for-export.png differ diff --git a/docs/admin_integration.rst b/docs/admin_integration.rst new file mode 100644 index 000000000..91b8cfb2f --- /dev/null +++ b/docs/admin_integration.rst @@ -0,0 +1,606 @@ +.. _admin-integration: + +================= +Admin integration +================= + +One of the main features of import-export is the support for integration with the Django Admin site. +This provides a convenient interface for importing and exporting Django objects. +Refer to the `Django Admin documentation `_ for details +of how to enable and configure the admin site. + +You can also install and run the :ref:`example application` to become familiar with Admin integration. + +Integrating import-export with your application requires extra configuration. 
+ +Admin integration is achieved by subclassing +:class:`~import_export.admin.ImportExportModelAdmin` or one of the available +mixins (:class:`~import_export.admin.ImportMixin`, +:class:`~import_export.admin.ExportMixin`, +:class:`~import_export.admin.ImportExportMixin`):: + + # app/admin.py + from django.contrib import admin + from .models import Book + from import_export.admin import ImportExportModelAdmin + + @admin.register(Book) + class BookAdmin(ImportExportModelAdmin): + resource_classes = [BookResource] + + +Once this configuration is present (and server is restarted), 'import' and 'export' buttons will be presented to the +user. +Clicking each button will open a workflow where the user can select the type of import or export. + +You can assign multiple resources to the ``resource_classes`` attribute. These resources will be presented in a select +dropdown in the UI. + +.. _change-screen-figure: + +.. figure:: _static/images/django-import-export-change.png + + A screenshot of the change view with Import and Export buttons. + +Importing +--------- + +To enable import, subclass :class:`~import_export.admin.ImportExportModelAdmin` or use +one of the available mixins, i.e. :class:`~import_export.admin.ImportMixin`, or +:class:`~import_export.admin.ImportExportMixin`. + +Enabling import functionality means that a UI button will automatically be presented on the Admin page: + +.. figure:: _static/images/import-button.png + :alt: The import button + +When clicked, the user will be directed into the import workflow. By default, import is a two step process, though +it can be configured to be a single step process (see :ref:`import_export_skip_admin_confirm`). + +The two step process is: + +1. Select the file and format for import. +2. Preview the import data and confirm import. + +.. _confirm-import-figure: + +.. figure:: _static/images/django-import-export-import.png + :alt: A screenshot of the 'import' view + + A screenshot of the 'import' view. + +.. 
figure:: _static/images/django-import-export-import-confirm.png + :alt: A screenshot of the 'confirm import' view + + A screenshot of the 'confirm import' view. + + +.. _import_confirmation: + +Import confirmation +------------------- + +To support import confirmation, uploaded data is written to temporary storage after +step 1 (:ref:`choose file`), and read back for final import after step 2 +(:ref:`import confirmation`). + +There are three mechanisms for temporary storage. + +#. Temporary file storage on the host server (default). This is suitable for development only. + Use of temporary filesystem storage is not recommended for production sites. + +#. The `Django cache `_. + +#. `Django storage `_. + +To modify which storage mechanism is used, please refer to the setting :ref:`import_export_tmp_storage_class`. + +Your choice of temporary storage will be influenced by the following factors: + +* Sensitivity of the data being imported. +* Volume and frequency of uploads. +* File upload size. +* Use of containers or load-balanced servers. + +Temporary resources are removed when data is successfully imported after the confirmation step. + +**For sensitive data you will need to understand exactly how temporary files are being stored and to ensure +that data is properly secured and managed.** + +.. warning:: + + If users do not complete the confirmation step of the workflow, + or if there are errors during import, then temporary resources may not be deleted. + This will need to be understood and managed in production settings. + For example, using a cache expiration policy or cron job to clear stale resources. + +.. _customizable_storage: + +Customizable storage +^^^^^^^^^^^^^^^^^^^^^ + +If using :class:`~import_export.tmp_storages.MediaStorage` as a storage module, then you can define which storage +backend implementation is used to handle create / read / delete operations on the persisted data. 
+ +If using Django 4.2 or greater, use the `STORAGES `_ +setting to define the backend, otherwise use :ref:`import_export_default_file_storage`. + +You can either supply a path to your own custom storage backend, or use pre-existing backends such as +`django-storages `_. + +If no custom storage implementation is supplied, then the Django default handler is used. + +For example, if using django-storages, you can configure s3 as a temporary storage location with the following:: + + IMPORT_EXPORT_TMP_STORAGE_CLASS = "import_export.tmp_storages.MediaStorage" + + STORAGES = { + "default": { + "BACKEND": "django.core.files.storage.FileSystemStorage", + }, + "import_export": { + "BACKEND": "storages.backends.s3.S3Storage", + "OPTIONS": { + "bucket_name": "", + "region_name": "", + "access_key": "", + "secret_key": "" + }, + }, + } + +.. _format_ui_error_messages: + +How to format UI error messages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Admin UI import error messages can be formatted using the :attr:`~import_export.admin.ImportMixin.import_error_display` +attribute. + +.. _admin_ui_exporting: + +Exporting +--------- + +As with import, it is also possible to configure export functionality. + +To do this, subclass :class:`~import_export.admin.ImportExportModelAdmin` or use +one of the available mixins, i.e. :class:`~import_export.admin.ExportMixin`, or +:class:`~import_export.admin.ImportExportMixin`. + +Enabling export functionality means that a UI button will automatically be presented on the Admin page: + +.. figure:: _static/images/export-button.png + :alt: The export button + +When clicked, the user will be directed into the export workflow. + +Export is a two step process. When the 'export' button is clicked, the user will be directed to a new screen, +where 'resource', 'fields' and 'file format' can be selected. + +.. _export_confirm: + +.. figure:: _static/images/django-import-export-export-confirm.png + :alt: the export confirm page + + The export 'confirm' page. 
+ +Once 'submit' is clicked, the export file will be automatically downloaded to the client (usually to the 'Downloads' +folder). + +It is possible to disable this extra step by setting the :ref:`import_export_skip_admin_export_ui` flag, +or by setting :attr:`~import_export.admin.ExportMixin.skip_export_form`. + +.. _export_via_admin_action: + +Exporting via Admin action +-------------------------- + +It's possible to configure the Admin UI so that users can select which items they want to export: + + +.. image:: _static/images/select-for-export.png + :alt: Select items for export + +To do this, simply declare an Admin instance which includes :class:`~import_export.admin.ExportActionMixin`:: + + class BookAdmin(ImportExportModelAdmin, ExportActionMixin): + # additional config can be supplied if required + pass + +Then register this Admin:: + + admin.site.register(Book, BookAdmin) + +Note that the above example refers specifically to the :ref:`example application`, you'll have to modify +this to refer to your own model instances. In the example application, the 'Category' model has this functionality. + +When 'Go' is clicked for the selected items, the user will be directed to the +:ref:`export 'confirm' page`. + +It is possible to disable this extra step by setting the :ref:`import_export_skip_admin_action_export_ui` or +:ref:`import_export_skip_admin_export_ui` flags, or by setting +:attr:`~import_export.admin.ExportMixin.skip_export_form_from_action` or +:attr:`~import_export.admin.ExportMixin.skip_export_form`. + +.. note:: + + If deploying to a multi-tenant environment, you may need to ensure that one set of users cannot export + data belonging to another set. To do this, filter the range of exportable items to be limited to only + those items which users should be permitted to export. + See :meth:`~import_export.admin.ExportMixin.get_export_queryset`. + +.. 
_export_from_model_change_form: + +Export from model instance change form +-------------------------------------- + +When :ref:`export via admin action` is enabled, then it is also possible to export from a +model instance change form: + +.. figure:: _static/images/change-form-export.png + :alt: export from change form + + Export from model instance change form + +When 'Export' is clicked, the user will be directed to the :ref:`export 'confirm' page`. + +This button can be removed from the UI by setting the +:attr:`~import_export.admin.ExportActionMixin.show_change_form_export` attribute, for example:: + + class CategoryAdmin(ExportActionModelAdmin): + show_change_form_export = False + +Customize admin import forms +---------------------------- + +It is possible to modify default import forms used in the model admin. For +example, to add an additional field in the import form, subclass and extend the +:class:`~import_export.forms.ImportForm` (note that you may want to also +consider :class:`~import_export.forms.ConfirmImportForm` as importing is a +two-step process). + +To use your customized form(s), change the respective attributes on your +``ModelAdmin`` class: + +* :attr:`~import_export.admin.ImportMixin.import_form_class` +* :attr:`~import_export.admin.ImportMixin.confirm_form_class` + +For example, imagine you want to import books and set each book to have the same Author, selected from a +dropdown. You can extend the import forms to include ``author`` field to select the author from. + +.. note:: + + Importing an E-Book using the :ref:`example application` + demonstrates this. + +.. figure:: _static/images/custom-import-form.png + + A screenshot of a customized import view. 
+ +Customize forms (for example see ``tests/core/forms.py``):: + + class CustomImportForm(ImportForm): + author = forms.ModelChoiceField( + queryset=Author.objects.all(), + required=True) + + class CustomConfirmImportForm(ConfirmImportForm): + author = forms.ModelChoiceField( + queryset=Author.objects.all(), + required=True) + +Customize ``ModelAdmin`` (for example see ``tests/core/admin.py``):: + + class CustomBookAdmin(ImportMixin, admin.ModelAdmin): + resource_classes = [BookResource] + import_form_class = CustomImportForm + confirm_form_class = CustomConfirmImportForm + + def get_confirm_form_initial(self, request, import_form): + initial = super().get_confirm_form_initial(request, import_form) + + # Pass on the `author` value from the import form to + # the confirm form (if provided) + if import_form: + initial['author'] = import_form.cleaned_data['author'].id + return initial + + admin.site.register(EBook, CustomBookAdmin) + +In order to save the selected author along with the EBook, another couple of methods are required. +Add the following to ``CustomBookAdmin`` class (in ``tests/core/admin.py``):: + + def get_import_data_kwargs(self, request, *args, **kwargs): + """ + Prepare kwargs for import_data. + """ + form = kwargs.get("form", None) + if form and hasattr(form, "cleaned_data"): + kwargs.update({"author": form.cleaned_data.get("author", None)}) + return kwargs + +Then add the following to ``CustomBookAdmin`` class (in ``tests/core/admin.py``):: + + def after_init_instance(self, instance, new, row, **kwargs): + if "author" in kwargs: + instance.author = kwargs["author"] + +The selected author is now set as an attribute on the instance object. When the instance is saved, +then the author is set as a foreign key relation to the instance. 
+ +Further customization +--------------------- + +To further customize the import forms, you might like to consider overriding the following +:class:`~import_export.admin.ImportMixin` methods: + +* :meth:`~import_export.admin.ImportMixin.get_import_form_class` +* :meth:`~import_export.admin.ImportMixin.get_import_form_kwargs` +* :meth:`~import_export.admin.ImportMixin.get_import_form_initial` +* :meth:`~import_export.admin.ImportMixin.get_confirm_form_class` +* :meth:`~import_export.admin.ImportMixin.get_confirm_form_kwargs` + +The parameters can then be read from ``Resource`` methods, such as: + +* :meth:`~import_export.resources.Resource.before_import` +* :meth:`~import_export.resources.Resource.before_import_row` + +.. seealso:: + + :doc:`/api_admin` + available mixins and options. + +.. _customize_admin_export_forms: + +Customize admin export forms +---------------------------- + +It is also possible to add fields to the export form so that export data can be +filtered. For example, we can filter exports by Author. + +.. figure:: _static/images/custom-export-form.png + + A screenshot of a customized export view. 
+ +Customize forms (for example see ``tests/core/forms.py``):: + + class CustomExportForm(AuthorFormMixin, ExportForm): + """Customized ExportForm, with author field required.""" + author = forms.ModelChoiceField( + queryset=Author.objects.all(), + required=True) + +Customize ``ModelAdmin`` (for example see ``tests/core/admin.py``):: + + class CustomBookAdmin(ImportMixin, ImportExportModelAdmin): + resource_classes = [EBookResource] + export_form_class = CustomExportForm + + def get_export_resource_kwargs(self, request, **kwargs): + export_form = kwargs.get("export_form") + if export_form: + kwargs.update(author_id=export_form.cleaned_data["author"].id) + return kwargs + + admin.site.register(Book, CustomBookAdmin) + +Create a Resource subclass to apply the filter +(for example see ``tests/core/admin.py``):: + + class EBookResource(ModelResource): + def __init__(self, **kwargs): + super().__init__() + self.author_id = kwargs.get("author_id") + + def filter_export(self, queryset, **kwargs): + return queryset.filter(author_id=self.author_id) + + class Meta: + model = EBook + +In this example, we can filter an EBook export using the author's name. + +1. Create a custom form which defines 'author' as a required field. +2. Create a 'CustomBookAdmin' class which defines a + :class:`~import_export.resources.Resource`, and overrides + :meth:`~import_export.mixins.BaseExportMixin.get_export_resource_kwargs`. + This ensures that the author id will be passed to the + :class:`~import_export.resources.Resource` constructor. +3. Create a :class:`~import_export.resources.Resource` which is instantiated with the + ``author_id``, and can filter the queryset as required. + +Using multiple resources +------------------------ + +It is possible to set multiple resources both to import and export `ModelAdmin` classes. +The ``ImportMixin``, ``ExportMixin``, ``ImportExportMixin`` and ``ImportExportModelAdmin`` classes accepts +subscriptable type (list, tuple, ...) 
as ``resource_classes`` parameter. + +The subscriptable could also be returned from one of the following: + +* :meth:`~import_export.mixins.BaseImportExportMixin.get_resource_classes` +* :meth:`~import_export.mixins.BaseImportMixin.get_import_resource_classes` +* :meth:`~import_export.mixins.BaseExportMixin.get_export_resource_classes` + +If there are multiple resources, the resource chooser appears in import/export admin form. +The displayed name of the resource can be changed through the `name` parameter of the `Meta` class. + + +Use multiple resources:: + + from import_export import resources + from core.models import Book + + + class BookResource(resources.ModelResource): + + class Meta: + model = Book + + + class BookNameResource(resources.ModelResource): + + class Meta: + model = Book + fields = ['id', 'name'] + name = "Export/Import only book names" + + + class CustomBookAdmin(ImportMixin, admin.ModelAdmin): + resource_classes = [BookResource, BookNameResource] + +.. _dynamically_set_resource_values: + +How to dynamically set resource values +-------------------------------------- + +There are a few use cases where it is desirable to dynamically set values in the `Resource`. For example, suppose you +are importing via the Admin console and want to use a value associated with the authenticated user in import queries. + +Suppose the authenticated user (stored in the ``request`` object) has a property called ``publisher_id``. During +import, we want to filter any books associated only with that publisher. 
+ +First of all, override the ``get_import_resource_kwargs()`` method so that the request user is retained:: + + class BookAdmin(ImportExportMixin, admin.ModelAdmin): + # attribute declarations not shown + + def get_import_resource_kwargs(self, request, *args, **kwargs): + kwargs = super().get_resource_kwargs(request, *args, **kwargs) + kwargs.update({"user": request.user}) + return kwargs + +Now you can add a constructor to your ``Resource`` to store the user reference, then override ``get_queryset()`` to +return books for the publisher:: + + class BookResource(ModelResource): + + def __init__(self, user): + self.user = user + + def get_queryset(self): + return self._meta.model.objects.filter(publisher_id=self.user.publisher_id) + + class Meta: + model = Book + +.. _interoperability: + +Interoperability with 3rd party libraries +----------------------------------------- + +import-export extends the Django Admin interface. There is a possibility that clashes may occur with other 3rd party +libraries which also use the admin interface. + +django-admin-sortable2 +^^^^^^^^^^^^^^^^^^^^^^ + +Issues have been raised due to conflicts with setting `change_list_template `_. There is a workaround listed `here `_. +Also, refer to `this issue `_. +If you want to patch your own installation to fix this, a patch is available `here `_. + +django-polymorphic +^^^^^^^^^^^^^^^^^^ + +Refer to `this issue `_. + +template skipped due to recursion issue +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Refer to `this issue `_. + +django-debug-toolbar +^^^^^^^^^^^^^^^^^^^^ + +If you use import-export using with `django-debug-toolbar `_. +then you need to configure ``debug_toolbar=False`` or ``DEBUG=False``, +It has been reported that the the import/export time will increase ~10 times. + +Refer to `this PR `_. + +.. _admin_security: + +Security +-------- + +Enabling the Admin interface means that you should consider the security implications. 
Some or all of the following +points may be relevant. + +Is there potential for untrusted imports? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* What is the source of your import file? + +* Is this coming from an external source where the data could be untrusted? + +* Could source data potentially contain malicious content such as script directives or Excel formulae? + +* Even if data comes from a trusted source, is there any content such as HTML which could cause issues when rendered + in a web page? + +What is the potential risk for exported data? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* If there is malicious content in stored data, what is the risk of exporting this data? + +* Could untrusted input be executed within a spreadsheet? + +* Are spreadsheets sent to other parties who could inadvertently execute malicious content? + +* Could data be exported to other formats, such as CSV, TSV or ODS, and then opened using Excel? + +* Could any exported data be rendered in HTML? For example, csv is exported and then loaded into another + web application. In this case, untrusted input could contain malicious code such as active script content. + +You should in all cases review `Django security documentation `_ +before deploying a live Admin interface instance. + +Mitigating security risks +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Please read the following topics carefully to understand how you can improve the security of your implementation. + +Sanitize exports +"""""""""""""""" + +By default, import-export does not sanitize or process imported data. Malicious content, such as script directives, +can be imported into the database, and can be exported without any modification. + +.. note:: + + HTML content, if exported into 'html' format, will be sanitized to remove scriptable content. + This sanitization is performed by the ``tablib`` library. + +You can optionally configure import-export to sanitize Excel formula data on export. 
See +:ref:`IMPORT_EXPORT_ESCAPE_FORMULAE_ON_EXPORT`. + +Enabling this setting only sanitizes data exported using the Admin Interface. +If exporting data :ref:`programmatically`, then you will need to apply your own sanitization. + +Limit formats +""""""""""""" + +Limiting the available import or export format types can be considered. For example, if you never need to support +import or export of spreadsheet data, you can remove this format from the application. + +Imports and exports can be restricted using the following settings: + +#. :ref:`IMPORT_EXPORT_FORMATS` +#. :ref:`IMPORT_FORMATS` +#. :ref:`EXPORT_FORMATS` + +Set permissions +""""""""""""""" + +Consider setting `permissions `_ to define which +users can import and export. + +#. :ref:`IMPORT_EXPORT_IMPORT_PERMISSION_CODE` +#. :ref:`IMPORT_EXPORT_EXPORT_PERMISSION_CODE` + +Raising security issues +^^^^^^^^^^^^^^^^^^^^^^^ + +Refer to `SECURITY.md `_ for +details on how to escalate security issues you may have found in import-export. diff --git a/docs/advanced_usage.rst b/docs/advanced_usage.rst new file mode 100644 index 000000000..e6567fa54 --- /dev/null +++ b/docs/advanced_usage.rst @@ -0,0 +1,958 @@ +============== +Advanced usage +============== + +Customize resource options +========================== + +By default :class:`~import_export.resources.ModelResource` introspects model +fields and creates :class:`~import_export.fields.Field` attributes with an +appropriate :class:`~import_export.widgets.Widget` for each field. + +Fields are generated automatically by introspection on the declared model class. The field defines the relationship +between the resource we are importing (for example, a csv row) and the instance we want to update. Typically, the row +data will map onto a single model instance. The row data will be set onto model instance attributes (including instance +relations) during the import process. 
+ +In a simple case, the name of the row headers will map exactly onto the names of the model attributes, and the import +process will handle this mapping. In more complex cases, model attributes and row headers may differ, and we will need +to declare explicitly declare this mapping. See :ref:`field_declaration` for more information. + +.. _declare_fields: + +Declare fields +-------------- + +You can optionally use the ``fields`` declaration to affect which fields are handled during import / export. + +To affect which model fields will be included in a resource, use the ``fields`` option to whitelist fields:: + + class BookResource(resources.ModelResource): + + class Meta: + model = Book + fields = ('id', 'name', 'price',) + +Or the ``exclude`` option to blacklist fields:: + + class BookResource(resources.ModelResource): + + class Meta: + model = Book + exclude = ('imported', ) + +If both ``fields`` and ``exclude`` are declared, the ``fields`` declaration takes precedence, and ``exclude`` is +ignored. + +.. _field_ordering: + +Field ordering +-------------- + +When importing or exporting, the ordering defined by ``fields`` is used, however an explicit order for importing or +exporting fields can be set using the either the ``import_order`` or ``export_order`` options:: + + class BookResource(resources.ModelResource): + + class Meta: + model = Book + fields = ('id', 'name', 'author', 'price',) + import_order = ('id', 'price',) + export_order = ('id', 'price', 'author', 'name') + +The precedence for the order of fields for import / export is defined as follows: + + * ``import_order`` or ``export_order`` (if defined) + * ``fields`` (if defined) + * The order derived from the underlying model instance. + +Where ``import_order`` or ``export_order`` contains a subset of ``fields`` then the ``import_order`` and +``export_order`` fields will be processed first. 
+ +If no ``fields``, ``import_order`` or ``export_order`` is defined then fields are created via introspection of the model +class. The order of declared fields in the model instance is preserved, and any non-model fields are last in the +ordering. + +Model relations +--------------- + +When defining :class:`~import_export.resources.ModelResource` fields it is possible to follow model relationships:: + + class BookResource(resources.ModelResource): + + class Meta: + model = Book + fields = ('author__name',) + +This example declares that the ``Author.name`` value (which has a foreign key relation to ``Book``) will appear in the +export. + +Declaring the relationship using this syntax means the following: + + * The field will be skipped when importing data. To understand how to import model relations, see + :ref:`import_model_relations`. + + * The default string value of the field will be exported. To have full control over the format of the export, + see :ref:`field_declaration`. + +.. _field_declaration: + +Explicit field declaration +-------------------------- + +We can declare fields explicitly to give us more control over the relationship between the row and the model attribute. +In the example below, we use the ``attribute`` kwarg to define the model attribute, and ``column_name`` to define the +column name (i.e. row header):: + + from import_export.fields import Field + + class BookResource(resources.ModelResource): + published_field = Field(attribute='published', column_name='published_date') + + class Meta: + model = Book + +The ``attribute`` parameter is optional, and if omitted it means that: + + 1. The field will be ignored during import. + + 2. The field will be present during export, but will have an empty value unless a + :ref:`dehydrate` method is defined. 
+ +If using the ``fields`` attribute to :ref:`declare fields` then the declared resource attribute +name must appear in the ``fields`` list:: + + class BookResource(ModelResource): + published_field = Field(attribute='published', column_name='published_date') + + class Meta: + fields = ("published_field",) + model = Book + +.. seealso:: + + :doc:`/api_fields` + Available field types and options. + + :ref:`field_widgets` + +.. _field_widgets: + +Field widgets +============= + +A widget is an object associated with each field declaration. The widget has two roles: + +1. Transform the raw import data into a python object which is associated with the instance (see :meth:`.clean`). +2. Export persisted data into a suitable export format (see :meth:`.render`). + +There are widgets associated with character data, numeric values, dates, foreign keys. You can also define your own +widget and associate it with the field. + +A :class:`~import_export.resources.ModelResource` creates fields with a default widget for a given field type via +introspection. If the widget should be initialized with different arguments, this can be done via an explicit +declaration or via the widgets dict. + +For example, the ``published`` field is overridden to use a different date format. This format will be used both for +importing and exporting resource:: + + class BookResource(resources.ModelResource): + published = Field(attribute='published', column_name='published_date', + widget=DateWidget(format='%d.%m.%Y')) + + class Meta: + model = Book + +Alternatively, widget parameters can be overridden using the widgets dict declaration:: + + class BookResource(resources.ModelResource): + + class Meta: + model = Book + widgets = { + 'published': {'format': '%d.%m.%Y'}, + } + +Declaring fields may affect the export order of the fields. If this is an issue, you can declare the +:attr:`~import_export.options.ResourceOptions.export_order` attribute. See :ref:`field_ordering`. + +.. 
_modify_render_return_type: + +Modify :meth:`.render` return type +---------------------------------- + +By default, :meth:`.render` will return a string type for export. There may be use cases where a native type is +required from export. If so, you can use the ``coerce_to_string`` parameter if the widget supports it. + +By default, ``coerce_to_string`` is ``True``, but if you set this to ``False``, then the native type will be returned +during export:: + + class BookResource(resources.ModelResource): + published = Field(widget=DateWidget(coerce_to_string=False)) + + class Meta: + model = Book + +If exporting via the Admin interface, the export logic will detect if exporting to either XLSX, XLS or ODS format, +and will set native types for *Numeric*, *Boolean* and *Date* values. This means that the ``coerce_to_string`` value +will be ignored and the native types will be returned. This is because in most use-cases the native type will be +expected in the exported format. If you need to modify this behavior and enforce string types in "binary" file formats +then the only way to do this is to override the widget ``render()`` method. + +.. seealso:: + + :doc:`/api_widgets` + Available widget types and options. + +Custom workflow based on import values +-------------------------------------- + +You can extend the import process to add workflow based on changes to persisted model instances. + +For example, suppose you are importing a list of books and you require additional workflow on the date of publication. +In this example, we assume there is an existing unpublished book instance which has a null 'published' field. + +There will be a one-off operation to take place on the date of publication, which will be identified by the presence of +the 'published' field in the import file. + +To achieve this, we need to test the existing value taken from the persisted instance (i.e. prior to import +changes) against the incoming value on the updated instance. 
+Both ``instance`` and ``original`` are attributes of :class:`~import_export.results.RowResult`. + +You can override the :meth:`~import_export.resources.Resource.after_import_row` method to check if the +value changes:: + + class BookResource(resources.ModelResource): + + def after_import_row(self, row, row_result, **kwargs): + if getattr(row_result.original, "published") is None \ + and getattr(row_result.instance, "published") is not None: + # import value is different from stored value. + # exec custom workflow... + + class Meta: + model = Book + store_instance = True + +.. note:: + + * The ``original`` attribute will be null if :attr:`~import_export.options.ResourceOptions.skip_diff` is True. + * The ``instance`` attribute will be null if :attr:`~import_export.options.ResourceOptions.store_instance` is False. + +Validation during import +======================== + +The import process will include basic validation during import. This validation can be customized or extended if +required. + +The import process distinguishes between: + +#. Validation errors which arise when failing to parse import data correctly. + +#. General exceptions which arise during processing. + +Errors are retained as :class:`~import_export.results.Error` instances in each :class:`~import_export.results.RowResult` +instance, which is stored in the single :class:`~import_export.results.Result` instance which is returned from the +import process. + +The :meth:`~import_export.resources.Resource.import_data` method takes optional parameters which can be used to +customize the handling of errors. Refer to the method documentation for specific details. + +Validation Errors +----------------- + +During import of a row, each field is iterated and any `ValueError `_ +errors raised by Widgets are stored in an instance of Django's +`ValidationError `_. 
+ +Validation errors are retained within the :attr:`~import_export.results.Result.invalid_rows` list as a +:class:`~import_export.results.InvalidRow` instance. + +If importing programmatically, you can set the ``raise_errors`` parameter of :meth:`~import_export.resources.Resource.import_data` +to ``True``, which will mean the process will exit at the first row which has errors:: + + rows = [ + (1, 'Lord of the Rings', '1996-01-01'), + (2, 'The Hobbit', '1996-01-02x'), + ] + dataset = tablib.Dataset(*rows, headers=['id', 'name', 'published']) + resource = BookResource() + self.resource.import_data(self.dataset, raise_errors=True) + +The above process will exit with a row number and error (formatted for clarity):: + + ImportError: 2: {'published': ['Value could not be parsed using defined date formats.']} + (OrderedDict({'id': 2, 'name': 'The Hobbit', 'published': 'x'})) + +To iterate over all validation errors produced from an import, pass ``False`` to ``raise_errors``:: + + result = self.resource.import_data(self.dataset, raise_errors=False) + for row in result.invalid_rows: + print(f"--- row {row.number} ---") + for field, error in row.error.error_dict.items(): + print(f"{field}: {error} ({row.values})") + +If using the :ref:`Admin UI`, errors are presented to the user during import (see below). + +Generic Errors +-------------- + +Generic errors are raised during import for cases which are not validation errors. +For example, generic errors are usually raised at the point the model instance is saved, such as attempt to save a float +to a int field. Because generic errors are raised from a lower point in the stack, it is not always possible to +identify which field caused the error. + +Generic errors are retained within the :attr:`~import_export.results.Result.error_rows` list as a +:class:`~import_export.results.ErrorRow` instance. 
+ +The ``raise_errors`` parameter can be used during programmatic import to halt the import at the first error:: + + rows = [ + (1, 'Lord of the Rings', '999'), + (2, 'The Hobbit', 'x'), + ] + dataset = tablib.Dataset(*rows, headers=['id', 'name', 'price']) + resource = BookResource() + result = resource.import_data( + dataset, + raise_errors=True + ) + +The above process will exit with a row number and error (formatted for clarity):: + + ImportError: 1: [] + (OrderedDict({'id': 1, 'name': 'Lord of the Rings', 'price': '1x'})) + +To iterate over all generic errors produced from an import, pass ``False`` to ``raise_errors``:: + + result = self.resource.import_data(self.dataset, raise_errors=False) + for row in result.error_rows: + print(f"--- row {row.number} ---") + for field, error in row.error.error_dict.items(): + print(f"{field}: {error} ({error.row})") + + +.. note:: + + `full_clean() `_ + is only called on the model instance if the Resource option + :attr:`~import_export.options.ResourceOptions.clean_model_instances` is enabled. + +Field level validation +---------------------- + +Validation of input can be performed during import by a widget's :meth:`~import_export.widgets.Widget.clean` method by +raising a `ValueError `_. +Consult the :doc:`widget documentation ` for more information. + +You can supply your own field level validation by overriding :meth:`~import_export.widgets.Widget.clean`, for example:: + + class PositiveIntegerWidget(IntegerWidget): + """Returns a positive integer value""" + + def clean(self, value, row=None, **kwargs): + val = super().clean(value, row=row, **kwargs) + if val < 0: + raise ValueError("value must be positive") + return val + +Field level errors will be presented in the :ref:`Admin UI`, for example: + +.. figure:: _static/images/date-widget-validation-error.png + + A screenshot showing a field specific error. 
+ +Instance level validation +------------------------- + +You can optionally configure import-export to perform model instance validation during import by enabling the +:attr:`~import_export.options.ResourceOptions.clean_model_instances` attribute. + +You can override the +`full_clean() `_ +method to provide extra validation, either at field or instance level:: + + class Book(models.Model): + + def full_clean(self, exclude=None, validate_unique=True): + super().full_clean(exclude, validate_unique) + + # non field specific validation + if self.published < date(1900, 1, 1): + raise ValidationError("book is out of print") + + # field specific validation + if self.name == "Ulysses": + raise ValidationError({"name": "book has been banned"}) + +.. figure:: _static/images/non-field-specific-validation-error.png + + A screenshot showing a non field specific error. + +Customize error handling +------------------------ + +You are free to subclass or replace the classes defined in :mod:`~import_export.results`. Override any or all of the +following hooks to customize error handling: + +* :meth:`~import_export.resources.Resource.get_result_class` +* :meth:`~import_export.resources.Resource.get_row_result_class` +* :meth:`~import_export.resources.Resource.get_error_result_class` + +.. _import_model_relations: + +Importing model relations +========================= + +If you are importing data for a model instance which has a foreign key relationship to another model then import-export +can handle the lookup and linking to the related model. + +Foreign Key relations +--------------------- + +``ForeignKeyWidget`` allows you to declare a reference to a related model. For example, if we are importing a 'book' +csv file, then we can have a single field which references an author by name. + +:: + + id,title,author + 1,The Hobbit, J. R. R. 
Tolkien + +We would have to declare our ``BookResource`` to use the author name as the foreign key reference:: + + from import_export import fields + from import_export.widgets import ForeignKeyWidget + + class BookResource(resources.ModelResource): + author = fields.Field( + column_name='author', + attribute='author', + widget=ForeignKeyWidget(Author, field='name')) + + class Meta: + model = Book + fields = ('author',) + +By default, ``ForeignKeyWidget`` will use 'pk' as the lookup field, hence we have to pass 'name' as the lookup field. +This relies on 'name' being a unique identifier for the related model instance, meaning that a lookup on the related +table using the field value will return exactly one result. + +This is implemented as a ``Model.objects.get()`` query, so if the instance in not uniquely identifiable based on the +given arg, then the import process will raise either ``DoesNotExist`` or ``MultipleObjectsReturned`` errors. + +See also :ref:`creating-non-existent-relations`. + +Refer to the :class:`~.ForeignKeyWidget` documentation for more detailed information. + +.. note:: + + If you are exporting a field which uses ``ForeignKeyWidget`` then the default formatting for the field will be + applied. + If you need better control over the format of the exported value (for example, formatting a date), then use a + :ref:`dehydrate` method or create a subclass of ``ForeignKeyWidget``. + Override :meth:`~import_export.widgets.Widget.render` to create custom formatting of output. + +Many-to-many relations +---------------------- + +``ManyToManyWidget`` allows you to import m2m references. For example, we can import associated categories with our +book import. The categories refer to existing data in a ``Category`` table, and are uniquely referenced by category +name. We use the pipe separator in the import file, which means we have to declare this in the ``ManyToManyWidget`` +declaration. 
+ +:: + + id,title,categories + 1,The Hobbit,Fantasy|Classic|Movies + +:: + + class BookResource(resources.ModelResource): + categories = fields.Field( + column_name='categories', + attribute='categories', + widget=widgets.ManyToManyWidget(Category, field='name', separator='|') + ) + + class Meta: + model = Book + +.. _creating-non-existent-relations: + +Creating non-existent relations +------------------------------- + +The examples above rely on the relation data being present prior to the import. It is a common use-case to create the +data if it does not already exist. A simple way to achieve this is to override the ``ForeignKeyWidget`` +:meth:`~import_export.widgets.ForeignKeyWidget.clean` method:: + + class AuthorForeignKeyWidget(ForeignKeyWidget): + def clean(self, value, row=None, **kwargs): + try: + val = super().clean(value) + except Author.DoesNotExist: + val = Author.objects.create(name=row['author']) + return val + +Now you will need to declare the widget in the Resource:: + + class BookResource(resources.ModelResource): + + author = fields.Field( + attribute="author", + column_name="author", + widget=AuthorForeignKeyWidget(Author, "name") + ) + + class Meta: + model = Book + +The code above can be adapted to handle m2m relationships, see +`this thread `_. + +Customize relation lookup +------------------------- + +The ``ForeignKeyWidget`` and ``ManyToManyWidget`` widgets will look for relations by searching the entire relation +table for the imported value. This is implemented in the :meth:`~import_export.widgets.ForeignKeyWidget.get_queryset` +method. For example, for an ``Author`` relation, the lookup calls ``Author.objects.all()``. + +In some cases, you may want to customize this behaviour, and it can be a requirement to pass dynamic values in. +For example, suppose we want to look up authors associated with a certain publisher id. 
We can achieve this by passing +the publisher id into the ``Resource`` constructor, which can then be passed to the widget:: + + + class BookResource(resources.ModelResource): + + def __init__(self, publisher_id): + super().__init__() + self.fields["author"] = fields.Field( + attribute="author", + column_name="author", + widget=AuthorForeignKeyWidget(publisher_id), + ) + +The corresponding ``ForeignKeyWidget`` subclass:: + + class AuthorForeignKeyWidget(ForeignKeyWidget): + model = Author + field = 'name' + + def __init__(self, publisher_id, **kwargs): + super().__init__(self.model, field=self.field, **kwargs) + self.publisher_id = publisher_id + + def get_queryset(self, value, row, *args, **kwargs): + return self.model.objects.filter(publisher_id=self.publisher_id) + +Then if the import was being called from another module, we would pass the ``publisher_id`` into the Resource:: + + >>> resource = BookResource(publisher_id=1) + +If you need to pass dynamic values to the Resource when importing via the Admin UI, refer to +See :ref:`dynamically_set_resource_values`. + +Django Natural Keys +------------------- + +The ``ForeignKeyWidget`` also supports using Django's natural key functions. A +manager class with the ``get_by_natural_key`` function is required for importing +foreign key relationships by the field model's natural key, and the model must +have a ``natural_key`` function that can be serialized as a JSON list in order to +export data. + +The primary utility for natural key functionality is to enable exporting data +that can be imported into other Django environments with different numerical +primary key sequences. The natural key functionality enables handling more +complex data than specifying either a single field or the PK. + +The example below illustrates how to create a field on the ``BookResource`` that +imports and exports its author relationships using the natural key functions +on the ``Author`` model and modelmanager. 
+ +The resource _meta option ``use_natural_foreign_keys`` enables this setting +for all Models that support it. + +:: + + from import_export.fields import Field + from import_export.widgets import ForeignKeyWidget + + class AuthorManager(models.Manager): + + def get_by_natural_key(self, name): + return self.get(name=name) + + class Author(models.Model): + + objects = AuthorManager() + + name = models.CharField(max_length=100) + birthday = models.DateTimeField(auto_now_add=True) + + def natural_key(self): + return (self.name,) + + # Only the author field uses natural foreign keys. + class BookResource(resources.ModelResource): + + author = Field( + column_name = "author", + attribute = "author", + widget = ForeignKeyWidget(Author, use_natural_foreign_keys=True) + ) + + class Meta: + model = Book + + # All widgets with foreign key functions use them. + class BookResource(resources.ModelResource): + + class Meta: + model = Book + use_natural_foreign_keys = True + +Read more at `Django Serialization `_. + +Create or update model instances +================================ + +When you are importing a file using import-export, the file is processed row by row. For each row, the +import process is going to test whether the row corresponds to an existing stored instance, or whether a new instance +is to be created. + +If an existing instance is found, then the instance is going to be *updated* with the values from the imported row, +otherwise a new row will be created. + +In order to test whether the instance already exists, import-export needs to use a field (or a combination of fields) +in the row being imported. The idea is that the field (or fields) will uniquely identify a single instance of the model +type you are importing. + +To define which fields identify an instance, use the ``import_id_fields`` meta attribute. You can use this declaration +to indicate which field (or fields) should be used to uniquely identify the row. 
If you don't declare +``import_id_fields``, then a default declaration is used, in which there is only one field: 'id'. + +For example, you can use the 'isbn' number instead of 'id' to uniquely identify a Book as follows:: + + class BookResource(resources.ModelResource): + + class Meta: + model = Book + import_id_fields = ('isbn',) + fields = ('isbn', 'name', 'author', 'price',) + +.. note:: + + If setting ``import_id_fields``, you must ensure that the data can uniquely identify a single row. If the chosen + field(s) select more than one row, then a ``MultipleObjectsReturned`` exception will be raised. If no row is + identified, then ``DoesNotExist`` exception will be raised. + +.. _dynamic_fields: + +Using 'dynamic fields' to identify existing instances +----------------------------------------------------- + +There are some use-cases where a field defined in ``import_id_fields`` is not present in the dataset. An example of +this would be dynamic fields, where a field is generated from other data and then used as an identifier. For example:: + + class BookResource(resources.ModelResource): + + def before_import_row(self, row, **kwargs): + # generate a value for an existing field, based on another field + row["hash_id"] = hashlib.sha256(row["name"].encode()).hexdigest() + + class Meta: + model = Book + # A 'dynamic field' - i.e. is used to identify existing rows + # but is not present in the dataset + import_id_fields = ("hash_id",) + +In the above example, a dynamic field called *hash_id* is generated and added to the dataset. In this example, an +error will be raised because *hash_id* is not present in the dataset. To resolve this, update the dataset before +import to add the dynamic field as a header:: + + class BookResource(resources.ModelResource): + + def before_import(self, dataset, **kwargs): + # mimic a 'dynamic field' - i.e. 
append field which exists on + # Book model, but not in dataset + dataset.headers.append("hash_id") + super().before_import(dataset, **kwargs) + + def before_import_row(self, row, **kwargs): + row["hash_id"] = hashlib.sha256(row["name"].encode()).hexdigest() + + class Meta: + model = Book + # A 'dynamic field' - i.e. is used to identify existing rows + # but is not present in the dataset + import_id_fields = ("hash_id",) + + +Access instances after import +============================= + +Access instance summary data +---------------------------- + +The instance pk and representation (i.e. output from ``repr()``) can be accessed after import:: + + rows = [ + (1, 'Lord of the Rings'), + ] + dataset = tablib.Dataset(*rows, headers=['id', 'name']) + resource = BookResource() + result = resource.import_data(dataset) + + for row_result in result: + print("%d: %s" % (row_result.object_id, row_result.object_repr)) + +Access full instance data +------------------------- + +All 'new', 'updated' and 'deleted' instances can be accessed after import if the +:attr:`~import_export.options.ResourceOptions.store_instance` meta attribute is set. + +For example, this snippet shows how you can retrieve persisted row data from a result:: + + class BookResourceWithStoreInstance(resources.ModelResource): + class Meta: + model = Book + store_instance = True + + rows = [ + (1, 'Lord of the Rings'), + ] + dataset = tablib.Dataset(*rows, headers=['id', 'name']) + resource = BookResourceWithStoreInstance() + result = resource.import_data(dataset) + + for row_result in result: + print(row_result.instance.pk) + +Handling duplicate data +======================= + +If an existing instance is identified during import, then the existing instance will be updated, regardless of whether +the data in the import row is the same as the persisted data or not. 
You can configure the import process to skip the row if it is a duplicate by using the setting :attr:`~import_export.options.ResourceOptions.skip_unchanged`.
_advanced_data_manipulation_on_export: + +Data manipulation on export +=========================== + +Not all data can be easily extracted from an object/model attribute. +In order to turn complicated data model into a (generally simpler) processed +data structure on export, ``dehydrate_`` method should be defined:: + + from import_export.fields import Field + + class BookResource(resources.ModelResource): + full_title = Field() + + class Meta: + model = Book + + def dehydrate_full_title(self, book): + book_name = getattr(book, "name", "unknown") + author_name = getattr(book.author, "name", "unknown") + return '%s by %s' % (book_name, author_name) + +In this case, the export looks like this: + + >>> from app.admin import BookResource + >>> dataset = BookResource().export() + >>> print(dataset.csv) + full_title,id,name,author,author_email,imported,published,price,categories + Some book by 1,2,Some book,1,,0,2012-12-05,8.85,1 + +It is also possible to pass a method name or a callable to the :meth:`~import_export.fields.Field` constructor. +If this method name or callable is supplied, then it will be called as the 'dehydrate' method. For example:: + + from import_export.fields import Field + + # Using method name + class BookResource(resources.ModelResource): + full_title = Field(dehydrate_method='custom_dehydrate_method') + + class Meta: + model = Book + + def custom_dehydrate_method(self, book): + return f"{book.name} by {book.author.name}" + + # Using a callable directly + def custom_dehydrate_callable(book): + return f"{book.name} by {book.author.name}" + + class BookResource(resources.ModelResource): + full_title = Field(dehydrate_method=custom_dehydrate_callable) + + class Meta: + model = Book + + +Filtering querysets during export +================================= + +You can use :meth:`~import_export.resources.Resource.filter_export` to filter querysets +during export. See also :ref:`customize_admin_export_forms`. 
+ +Modify dataset after export +=========================== + +The :meth:`~import_export.resources.Resource.after_export` method allows you to modify the +`tablib `_ dataset before it is rendered in the export format. + +This can be useful for adding dynamic columns or applying custom logic to the final dataset. + +Import and export with different fields +======================================= + +If you would like to import one set of fields, and then export a different set, then the recommended way to do this +is to define two resources:: + + class BookImportResource(ModelResource): + class Meta: + model = Book + fields = ["id", "name"] + + + class BookExportResource(ModelResource): + class Meta: + model = Book + fields = ["id", "name", "published"] + +If you are using these resources in the Admin UI, declare them in your +:ref:`admin class`. + +Modify xlsx format +================== + +It is possible to modify the output of any XLSX export. The output bytes can be read and then modified using the +`openpyxl `_ library (which can be included as an import_export +dependency). + +You can override :meth:`~import_export.admin.ExportMixin.get_export_data` as follows:: + + def get_export_data(self, file_format, request, queryset, **kwargs): + blob = super().get_export_data(file_format, request, queryset, **kwargs) + workbook_data = BytesIO(blob) + workbook_data.seek(0) + wb = openpyxl.load_workbook(workbook_data) + # modify workbook as required + output = BytesIO() + wb.save(output) + return output.getvalue() + +Custom export file name +======================= + +Customize the export file name by overriding :meth:`~import_export.admin.ExportMixin.get_export_filename`. 
+ +Signals +======= + +To hook into the import-export workflow, you can connect to ``post_import``, +``post_export`` signals:: + + from django.dispatch import receiver + from import_export.signals import post_import, post_export + + @receiver(post_import, dispatch_uid='balabala...') + def _post_import(model, **kwargs): + # model is the model that was imported + pass + + @receiver(post_export, dispatch_uid='balabala...') + def _post_export(model, **kwargs): + # model is the model that was exported + pass + +.. _concurrent-writes: + +Concurrent writes +================= + +There is specific consideration required if your application allows concurrent writes to data during imports. + +For example, consider this scenario: + +#. An import process is run to import new books identified by title. +#. The :meth:`~import_export.resources.Resource.get_or_init_instance` is called and identifies that there is no + existing book with this title, hence the import process will create it as a new record. +#. At that exact moment, another process inserts a book with the same title. +#. As the row import process completes, :meth:`~import_export.resources.Resource.save` is called and an error is thrown + because the book already exists in the database. + +By default, import-export does not prevent this situation from occurring, therefore you need to consider what processes +might be modifying shared tables during imports, and how you can mitigate risks. If your database enforces integrity, +then you may get errors raised, if not then you may get duplicate data. + +Potential solutions are: + +* Use one of the :doc:`import workflow` methods to lock a table during import if the database supports + it. + + * This should only be done in exceptional cases because there will be a performance impact. + * You will need to release the lock both in normal workflow and if there are errors. 
+ +* Override :meth:`~import_export.resources.Resource.do_instance_save` to perform a + `update_or_create() `_. + This can ensure that data integrity is maintained if there is concurrent access. + +* Modify working practices so that there is no risk of concurrent writes. For example, you could schedule imports to + only run at night. + +This issue may be more prevalent if using :doc:`bulk imports`. This is because instances are held in +memory for longer before being written in bulk, therefore there is potentially more risk of another process modifying +an instance before it has been persisted. + +Additional configuration +======================== + +Please refer to the :doc:`API documentation` for additional configuration options. diff --git a/docs/api_exceptions.rst b/docs/api_exceptions.rst new file mode 100644 index 000000000..3c111a23b --- /dev/null +++ b/docs/api_exceptions.rst @@ -0,0 +1,23 @@ +========== +Exceptions +========== + +.. currentmodule:: import_export.exceptions + +ImportExportError +----------------- + +.. autoclass:: import_export.exceptions.ImportExportError + :members: + +FieldError +---------- + +.. autoclass:: import_export.exceptions.FieldError + :members: + +ImportError +----------- + +.. autoclass:: import_export.exceptions.ImportError + :members: diff --git a/docs/api_forms.rst b/docs/api_forms.rst index 540a7fa5c..2f122d36f 100644 --- a/docs/api_forms.rst +++ b/docs/api_forms.rst @@ -4,6 +4,12 @@ Forms .. module:: import_export.forms +.. autoclass:: ImportExportFormBase + .. autoclass:: ImportForm .. autoclass:: ConfirmImportForm + +.. autoclass:: ExportForm + +.. autoclass:: SelectableFieldsExportForm diff --git a/docs/api_mixins.rst b/docs/api_mixins.rst new file mode 100644 index 000000000..4b9b50d9f --- /dev/null +++ b/docs/api_mixins.rst @@ -0,0 +1,35 @@ +====== +Mixins +====== + +.. currentmodule:: import_export.mixins + +BaseImportExportMixin +--------------------- + +.. 
autoclass:: import_export.mixins.BaseImportExportMixin + :members: + +BaseImportMixin +--------------- + +.. autoclass:: import_export.mixins.BaseImportMixin + :members: + +BaseExportMixin +--------------- + +.. autoclass:: import_export.mixins.BaseExportMixin + :members: + +ExportViewMixin +--------------- + +.. autoclass:: import_export.mixins.ExportViewMixin + :members: + +ExportViewFormMixin +------------------- + +.. autoclass:: import_export.mixins.ExportViewFormMixin + :members: diff --git a/docs/api_resources.rst b/docs/api_resources.rst index f579210dc..37ab288ce 100644 --- a/docs/api_resources.rst +++ b/docs/api_resources.rst @@ -18,7 +18,7 @@ ModelResource ResourceOptions (Meta) ---------------------- -.. autoclass:: import_export.resources.ResourceOptions +.. autoclass:: import_export.options.ResourceOptions :members: modelresource_factory diff --git a/docs/api_results.rst b/docs/api_results.rst index 890340b32..1e7072431 100644 --- a/docs/api_results.rst +++ b/docs/api_results.rst @@ -9,3 +9,15 @@ Result .. autoclass:: import_export.results.Result :members: + +RowResult +--------- + +.. autoclass:: import_export.results.RowResult + :members: + +InvalidRow +---------- + +.. autoclass:: import_export.results.InvalidRow + :members: diff --git a/docs/api_widgets.rst b/docs/api_widgets.rst index 94b69a5b4..d5ca7189c 100644 --- a/docs/api_widgets.rst +++ b/docs/api_widgets.rst @@ -5,6 +5,9 @@ Widgets .. autoclass:: import_export.widgets.Widget :members: +.. autoclass:: import_export.widgets.NumberWidget + :members: + .. autoclass:: import_export.widgets.IntegerWidget :members: diff --git a/docs/bulk_import.rst b/docs/bulk_import.rst index 249c92fca..4d4ddce57 100644 --- a/docs/bulk_import.rst +++ b/docs/bulk_import.rst @@ -2,13 +2,17 @@ Bulk imports ============= -django-import-export provides a 'bulk mode' to improve the performance of importing large datasets. +import-export provides a 'bulk mode' to improve the performance of importing large datasets. 
-In normal operation, django-import-export will call ``instance.save()`` as each row in a dataset is processed. Bulk mode means that ``instance.save()`` is not called, and instances are instead added to temporary lists. Once the number of rows processed matches the ``batch_size`` value, then either ``bulk_create()`` or ``bulk_update()`` is called. +In normal operation, import-export will call ``instance.save()`` as each row in a dataset is processed. Bulk +mode means that ``instance.save()`` is not called, and instances are instead added to temporary lists. Once the number +of rows processed matches the ``batch_size`` value, then either ``bulk_create()`` or ``bulk_update()`` is called. -If ``batch_size`` is set to ``None``, then ``bulk_create()`` / ``bulk_update()`` is only called once all rows have been processed. +If ``batch_size`` is set to ``None``, then ``bulk_create()`` / ``bulk_update()`` is only called once all rows have been +processed. -Bulk deletes are also supported, by applying a ``filter()`` to the temporary object list, and calling ``delete()`` on the resulting query set. +Bulk deletes are also supported, by applying a ``filter()`` to the temporary object list, and calling ``delete()`` on +the resulting query set. Caveats ======= @@ -19,13 +23,24 @@ Caveats * Bulk operations do not work with many-to-many relationships. -* Take care to ensure that instances are validated before bulk operations are called. This means ensuring that resource fields are declared appropriately with the correct widgets. If an exception is raised by a bulk operation, then that batch will fail. It's also possible that transactions can be left in a corrupted state. Other batches may be successfully persisted, meaning that you may have a partially successful import. +* Take care to ensure that instances are validated before bulk operations are called. This means ensuring that + resource fields are declared appropriately with the correct widgets. 
If an exception is raised by a bulk operation, + then that batch will fail. It's also possible that transactions can be left in a corrupted state. Other batches may + be successfully persisted, meaning that you may have a partially successful import. -* In bulk mode, exceptions are not linked to a row. Any exceptions raised by bulk operations are logged (and re-raised if ``raise_errors`` is true). +* In bulk mode, exceptions are not linked to a row. Any exceptions raised by bulk operations are logged and returned + as critical (non-validation) errors (and re-raised if ``raise_errors`` is true). -* If you use :class:`~import_export.widgets.ForeignKeyWidget` then this can affect performance, because it reads from the database for each row. If this is an issue then create a subclass which caches ``get_queryset()`` results rather than reading for each invocation. +* If you use :class:`~import_export.widgets.ForeignKeyWidget` then this should not affect performance during lookups, + because the ``QuerySet`` cache should be used. Some more information + `here `_. -For more information, please read the Django documentation on `bulk_create() `_ and `bulk_update() `_. +* If there is the potential for concurrent writes to a table during a bulk operation, then you need to consider the + potential impact of this. Refer to :ref:`concurrent-writes` for more information. + +For more information, please read the Django documentation on +`bulk_create() `_ and +`bulk_update() `_. .. _performance_tuning Performance tuning @@ -35,10 +50,19 @@ Consider the following if you need to improve the performance of imports. * Enable ``use_bulk`` for bulk create, update and delete operations (read `Caveats`_ first). -* If your import is creating instances only (i.e. you are sure there are no updates), then set ``force_init_instance = True``. +* If your import is creating instances only (i.e. you are sure there are no updates), then set + ``force_init_instance = True``. 
+ +* If your import is updating or creating instances, and you have a set of existing instances which can be stored in + memory, use :class:`~import_export.instance_loaders.CachedInstanceLoader` -* If your import is updating or creating instances, and you have a set of existing instances which can be stored in memory, use :class:`~import_export.instance_loaders.CachedInstanceLoader` +* By default, import rows are compared with the persisted representation, and the difference is stored against each row + result. If you don't need this diff, then disable it with ``skip_diff = True``. -* By default, import rows are compared with the persisted representation, and the difference is stored against each row result. If you don't need this diff, then disable it with ``skip_diff = True``. +* Setting ``batch_size`` to a different value is possible, but tests showed that setting this to ``None`` always + resulted in worse performance in both duration and peak memory. + +Testing +======= -* Setting ``batch_size`` to a different value is possible, but tests showed that setting this to ``None`` always resulted in worse performance in both duration and peak memory. \ No newline at end of file +Scripts are provided to enable testing and benchmarking of bulk imports. See :ref:`testing:Bulk testing`. diff --git a/docs/celery.rst b/docs/celery.rst index 3ddfd4e9b..c51299a75 100644 --- a/docs/celery.rst +++ b/docs/celery.rst @@ -1,5 +1,10 @@ +.. _celery: + =========== -Using celery to perform imports +Using celery =========== -You can use the 3rd party `django-import-export-celery `_ application to process long imports in celery. 
+You can use one of the third-party applications to process long imports and exports in Celery: + +* `django-import-export-celery `_ (`PyPI `_) +* `django-import-export-extensions `_ (`PyPI `_) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9623a0b5c..1b33f144f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -1,66 +1,621 @@ Changelog ========= +.. warning:: + + If upgrading from v3, v4 introduces breaking changes. Please refer to :doc:`release notes`. + +4.3.7 (2025-02-25) +------------------ + +- Update French translation (`2042 `_) + +4.3.6 (2025-02-21) +------------------ + +- Add flag to ignore empty rows in XLSX import (`2028 `_) +- Add support for Django 5.2 (`2037 `_) +- Fix Chinese translation (`2040 `_) + +4.3.5 (2025-02-01) +------------------ + +- Clarify documentation on creating non-existent relations (`2029 `_) +- Update Turkish translations (`2031 `_) + +4.3.4 (2025-01-15) +------------------ + +- Handle QuerySets called with ``values()`` on export (`2011 `_) + +4.3.3 (2024-12-02) +------------------ + +- Update all translations (`2014 `_) + +4.3.2 (2024-12-01) +------------------ + +- Updated Farsi translation (`2008 `_) +- Updated German translation (`2012 `_) + +4.3.1 (2024-11-19) +------------------ + +- Fix imports for openpyxl (`2005 `_) + +4.3.0 (2024-11-19) +------------------ + +- Addition of import & export management commands (`1992 `_) +- Handle ``IllegalCharacterError`` in xlsx exports (`2001 `_) +- Add ``__repr__`` method to InvalidRow for improved debugging (`2003 `_) + +4.2.1 (2024-11-11) +------------------ + +- Removed dependency files in favour of ``pyproject.toml`` (`1982 `_) +- Documentation updates (`1989 `_) +- Fix crash on export of tz-aware datetime to binary formats (`1995 `_) + +4.2.0 (2024-10-23) +------------------ + +This release contains breaking changes. Please refer to :doc:`release notes`. 
+ +- Upgraded tablib version (`1627 `_) +- Document overriding formats (`1868 `_) +- Consistent queryset creation in ModelAdmin export mixin (`1890 `_) +- Deprecated :meth:`~import_export.admin.ExportMixin.get_valid_export_item_pks` in favour of :meth:`~import_export.admin.ExportMixin.get_queryset` (`1890 `_) +- Improve deprecation warning for ``ExportViewFormMixin`` to report at point of class definition (`1900 `_) +- Fix export for fields with custom declared name (`1903 `_) +- Hide the "Resource" form when it only has one option (`1908 `_) +- Update date, time and datetime widget render method to handle derived instance (`1918 `_) +- Add support for Django 5.1 (`1926 `_) +- Accept numbers using the numeric separators of the current language in number widgets (:meth:`~import_export.widgets.FloatWidget`, :meth:`~import_export.widgets.IntegerWidget`, :meth:`~import_export.widgets.DecimalWidget`) (`1927 `_) +- Added warning for declared fields excluded from fields whitelist (`1930 `_) +- Fix v3 regression: handle native types on export to spreadsheet (`1939 `_) +- Fix export button displayed on change screen when export permission not assigned (`1942 `_) +- Fix crash for Django 5.1 when rows are skipped (`1944 `_) +- Allow callable in dehydrate method (`1950 `_) +- Fix crash when Resource fields declared incorrectly (`1963 `_) +- Updated dependencies (`1979 `_) + +4.1.1 (2024-07-08) +------------------ + +- Restore return value for deprecated method :meth:`~import_export.resources.Resource.get_fields` (`1897 `_) + +4.1.0 (2024-06-25) +------------------ + +- Improve Error class (`1882 `_) +- Fix response content assertions (`1883 `_) +- Admin UI: display checkboxes before labels in export form (`1884 `_) +- deprecated unused method: :meth:`~import_export.resources.Resource.get_fields` (`1885 `_) + +4.0.10 (2024-06-25) +------------------ + +- remove django version check for custom storages (`1889 `_) + +4.0.9 (2024-06-18) +------------------ + +- docs: clarify 
:meth:`~import_export.resources.Resource.for_delete` documentation (`1877 `_) +- fix default ``Field`` returns empty string instead of *'None'* (`1872 `_) +- revert setting default value for ``attribute`` (`1875 `_) + +4.0.8 (2024-06-13) +------------------ + +- docs: clarify widget configuration (`1865 `_) +- Enable skip export confirm page (`1867 `_) + +4.0.7 (2024-05-30) +------------------ + +- fix documentation to show correct method for reading form data on export (`1859 `_) +- Admin UI: display both field name and column name on export (`1857 `_) +- fix export declared field with custom column name (`1861 `_) +- fix declared fields do not have correct Widget class set (`1861 `_) +- docs: clarify changes to ``CharWidget`` in v4 (`1862 `_) +- refactor :class:`~import_export.resources.Resource` to remove code duplication in export (`1863 `_) + +4.0.6 (2024-05-27) +------------------ + +- Added additional test for export field order (`1848 `_) +- fix crash on import when relation has custom PK (`1853 `_) +- fix crash on export from action when instance has custom PK (`1854 `_) + +4.0.5 (2024-05-23) +------------------ + +- Fix for invalid build due to malformed README.rst (`1851 `_) + +4.0.4 (2024-05-23) +------------------ + +- Refactored ``DateWidget`` & ``DateTimeWidget`` to remove code duplication (`1839 `_) +- Release note documentation updated (`1840 `_) +- Added missing migration to example app (`1843 `_) +- Fix admin UI display of field import order (`1849 `_) + +4.0.3 (2024-05-16) +------------------ + +- Support widgets with CSS and JS media in ImportForm (`1807 `_) +- Documentation updates (`1833 `_) +- Clarified documentation when importing with ``import_id_fields`` (`1836 `_) +- re-add ``resource_class`` deprecation warning (`1837 `_) +- fixed Arabic translation for 'import' word (`1838 `_) + +4.0.2 (2024-05-13) +------------------ + +- fix export with custom column name (`1821 `_) +- fix allow ``column_name`` to be declared in ``fields`` list 
(`1822 `_) +- fix clash between ``key_is_id`` and ``use_natural_foreign_keys`` (`1824 `_) +- remove unreachable code (`1825 `_) +- fix issue with widget assignment for custom ``ForeignKey`` subclasses (`1826 `_) +- performance: select of valid pks for export restricted to action exports (`1827 `_) +- fix crash on export with custom column name (`1828 `_) +- remove outdated datetime formatting logic (`1830 `_) + +4.0.1 (2024-05-08) +------------------ + +- fix crash on export when model has no ``id`` (`1802 `_) +- fix Widget crash when django Field subclass is used (`1805 `_) +- fix regression: allow imports when default ``import_id_field`` is not present (`1813 `_) + +4.0.0 (2024-04-27) +------------------ + +Deprecations +############ + +- Removed v3 deprecations (`1629 `_) +- Deprecation of ``ExportViewFormMixin`` (`1666 `_) + +Enhancements +############ + +- Refactor ordering logic (`1626 `_) + + - Refactor 'diff' logic to avoid calling dehydrate methods + + - Refactor declarations of ``fields``, ``import_order`` and ``export_order`` to fix ordering issues + +- refactor to export HTML / formulae escaping updates (`1638 `_) +- removed unused variable ``Result.new_record`` (`1640 `_) +- Refactor ``resources.py`` to standardise method args (`1641 `_) +- added specific check for missing ``import_id_fields`` (`1645 `_) +- Enable optional tablib dependencies (`1647 `_) +- added :meth:`~import_export.widgets.ForeignKeyWidget.get_lookup_kwargs` to make it easier to override object lookup (`1651 `_) +- Standardised interface of :meth:`~import_export.widgets.Widget.render` (`1657 `_) +- Fix declaring existing model field(s) in ModelResource altering export order (`1663 `_) +- Added :meth:`~import_export.resources.Resource.do_instance_save` helper method (`1668 `_) +- Enable defining Resource model as a string (`1669 `_) +- Support multiple Resources for export (`1671 `_) +- Support export from model change form (`1687 `_) +- Import form defaults to read-only field if only 
one format defined (`1690 `_) +- Updated Admin UI to track deleted and skipped Imports (`1691 `_) +- Added customizable ``MediaStorage`` (`1708 `_) +- Added customization of Admin UI import error messages (`1727 `_) +- Improve output of error messages (`1729 `_) +- Added feature: selectable fields for admin export view (`1734 `_) +- Added specific check for declared :attr:`~import_export.options.ResourceOptions.import_id_fields` not in dataset (`1735 `_) +- added try / catch to :meth:`~import_export.results.RowResult.add_instance_info` to handle unserializable instances (`1767 `_) +- Add form error if source file contains invalid header (`1780 `_) +- Remove unneeded format method overrides (`1785 `_) +- Support dynamic selection of Resource class based on request property (`1787 `_) + +Fixes +##### + +- dynamic widget parameters for CharField fixes 'NOT NULL constraint' error in xlsx (`1485 `_) +- fix cooperation with adminsortable2 (`1633 `_) +- Removed unused method ``utils.original()`` +- Fix deprecated ``log_action`` method (`1673 `_) +- fix multiple inheritance not setting options (`1696 `_) +- Fix issue where declared Resource fields not defined in ``fields`` are still imported (`1702 `_) +- Fixed handling of :attr:`~import_export.exceptions.FieldError` during Admin import (`1755 `_) +- Fixed handling of django ``FieldError`` during Admin export (`1756 `_) +- Add check for type to :meth:`~import_export.widgets.Widget.render` (`1757 `_) +- fix: YAML export does not work with SafeString (`1762 `_) +- fix: :meth:`~import_export.widgets.SimpleArrayWidget.render` crashes if value is ``None`` (`1771 `_) +- fix form not being passed to ``get_import_resource_kwargs()`` (`1789 `_) +- Fix: Missing default widget for ``PositiveBigIntegerField`` (`1795 `_) + +Development +########### + +- Refactor build process (`1630 `_) +- Refactored ``test_admin_integration()``: split into smaller test modules (`1662 `_) +- Refactored ``test_resources()``: split into smaller test 
modules (`1672 `_) +- Updated ``docker-compose`` command with latest version syntax in ``runtests.sh`` (`1686 `_) +- Refactored :mod:`~import_export.resources` into separate modules for ``declarative`` and ``options`` (`1695 `_) +- Refactored tests to remove dependencies between tests (`1703 `_) +- Handle python3.12 datetime deprecations (`1705 `_) +- Refactor ``test_resources.py`` into smaller modules (`1733 `_) +- Updated test coverage to include error row when ``collect_failed_rows`` is ``True`` (`1753 `_) +- Removed support for django 3.2 (`1790 `_) +- Added test for widgets generating by model fields `1795 `_) + +Documentation +############# + +- Clarified ``skip_diff`` documentation (`1655 `_) +- Improved documentation relating to validation on import (`1665 `_) +- Added FAQ entry for exporting large datasets (`1706 `_) +- Relocated admin integration section from advanced_usage.rst into new file (`1713 `_) +- Updated Admin integration documentation to clarify how to save custom form values (`1746 `_) + +Performance +########### + +- Fix slow export with ForeignKey id (`1717 `_) + +i18n +#### + +- updated translations for release-4 (`1775 `_) + +3.3.9 (2024-04-28) +------------------ + +- Update translations for Russian language (`1797 `_) + +3.3.8 (2024-04-08) +------------------ + +- Add additional django template block for extending import page (`1776 `_) + +3.3.7 (2024-02-03) +------------------ + +- Pass :meth:`~import_export.mixins.BaseExportMixin.get_export_resource_kwargs` to Resource constructor + :meth:`~import_export.admin.ExportMixin.export_action` (`1739 `_) +- Fix issue with model class passed to Resource constructor crashing on export (`1745 `_) +- Fix indentation for skip_row docstring (`1743 `_) +- Return ``kwargs`` by default from :meth:`~import_export.mixins.BaseImportExportMixin.get_resource_kwargs` (`1748 `_) + +3.3.6 (2024-01-10) +------------------ + +- Fix issue with highlight when using 'light' color scheme (`1728 `_) + +3.3.5 
(2023-12-19) +------------------ + +- Remove unnecessary ChangeList queries to speed up export via Admin UI (`1715 `_) +- Respect color scheme override (`1720 `_) +- Update FAQ to cover skipping rows with validation errors (`1721 `_) + +3.3.4 (2023-12-09) +------------------ + +- Added support for django5 (`1634 `_) +- Show list of exported fields in Admin UI (`1685 `_) +- Added `CONTRIBUTING.md` +- Added support for python 3.12 (`1698 `_) +- Update Finnish translations (`1701 `_) + +3.3.3 (2023-11-11) +------------------ + +- :meth:`~import_export.admin.ExportActionMixin.export_admin_action` can be overridden by subclassing it in the + ``ModelAdmin`` (`1681 `_) + +3.3.2 (2023-11-09) +------------------ + +- Updated Spanish translations (`1639 `_) +- Added documentation and tests for retrieving instance information after import (`1643 `_) +- :meth:`~import_export.widgets.NumberWidget.render` returns ``None`` as empty string + if ``coerce_to_string`` is True (`1650 `_) +- Updated documentation to describe how to select for export in Admin UI (`1670 `_) +- Added catch for django5 deprecation warning (`1676 `_) +- Updated and compiled message files (`1678 `_) + +3.3.1 (2023-09-14) +------------------ + +- Added `.readthedocs.yaml` (`1625 `_) + +3.3.0 (2023-09-14) +------------------ + +Deprecations +############ + +- Remove 'escape output' deprecation (`1618 `_) + + - Removal of deprecated :ref:`IMPORT_EXPORT_ESCAPE_OUTPUT_ON_EXPORT`. + + - Deprecation of :ref:`IMPORT_EXPORT_ESCAPE_HTML_ON_EXPORT`. Refer to :ref:`installation` docs. 
+ +Enhancements +############ + +- Refactoring and fix to support filtering exports (`1579 `_) +- Store ``instance`` and ``original`` object in :class:`~import_export.results.RowResult` (`1584 `_) +- Add customizable blocks in import.html (`1598 `_) +- Include 'allowed formats' settings (`1606 `_) +- Add kwargs to enable CharWidget to return values as strings (`1623 `_) + +Internationalization +#################### + +- Add Finnish translation (`1588 `_) +- Updated ru translation (`1604 `_) +- Fixed badly formatted translation string (`1622 `_) +- Remove 'escape output' deprecation (`1618 `_) + +Fixes +##### + +- Do not decode bytes when writing to MediaStorage (`1615 `_) +- Fix for cache entries not removed (`1621 `_) + +Development +########### + +- Added support for Django 4.2 (`1570 `_) +- Add automatic formatting and linting (`1571 `_) +- removed duplicate admin integration tests (`1616 `_) +- Removed support for python3.7 and django4.0 (past EOL) (`1618 `_) + +Documentation +############# + +- Updated documentation for interoperability with third party libraries (`1614 `_) + +3.2.0 (2023-04-12) +------------------ + +- Escape formulae on export to XLSX (`1568 `_) + + - This includes deprecation of :ref:`IMPORT_EXPORT_ESCAPE_OUTPUT_ON_EXPORT`. + + Refer to :ref:`installation` for alternatives. + + - :meth:`import_export.formats.TablibFormat.export()`: ``escape_output`` flag now deprecated in favour of + ``escape_html`` and ``escape_formulae``. + +- Refactor methods so that ``args`` are declared correctly (`1566 `_) + + - This includes deprecations to be aware of if you have overridden :meth:`~import_export.resources.Resource.export` + or :class:`~import_export.forms.ImportExportFormBase`. + + - ``export()``: If passing ``queryset`` as the first arg, ensure this is passed as a named parameter. + + - ``ImportExportFormBase``: If passing ``resources`` to ``__init__`` as the first arg, ensure this is + passed as a named parameter. 
+ +- Updated ``setup.py`` (`1564 `_) +- Added ``SECURITY.md`` (`1563 `_) +- Updated FAQ to include workaround for `RelatedObjectDoesNotExist` exception (`1562 `_) +- Prevent error comparing m2m field of the new objects (`1560 `_) +- Add documentation for passing data from admin form to Resource (`1555 `_) +- Added new translations to Spanish and Spanish (Argentina) (`1552 `_) +- Pass kwargs to import_set function (`1448 `_) + +3.1.0 (2023-02-21) +------------------ + +- Float and Decimal widgets use LANGUAGE_CODE on export (`1501 `_) +- Add optional dehydrate method param (`1536 `_) + + - ``exceptions`` module has been undeprecated + +- Updated DE translation (`1537 `_) +- Add option for single step import via Admin Site (`1540 `_) +- Add support for m2m add (`1545 `_) +- collect errors on bulk operations (`1541 `_) + + - this change causes bulk import errors to be logged at DEBUG level not EXCEPTION. + +- Improve bulk import performance (`1539 `_) + + - ``raise_errors`` has been deprecated as a kwarg in ``import_row()`` + +- Reduce memory footprint during import (`1542 `_) +- documentation updates (`1533 `_) +- add detailed format parameter docstrings to ``DateWidget`` and ``TimeWidget`` (`1532 `_) +- tox updates (`1534 `_) +- fix xss vulnerability in html export (`1546 `_) + +3.0.2 (2022-12-13) +------------------ + +- Support Python 3.11 (`1508 `_) +- use ``get_list_select_related`` in ``ExportMixin`` (`1511 `_) +- bugfix: handle crash on start-up when ``change_list_template`` is a property (`1523 `_) +- bugfix: include instance info in row result when row is skipped (`1526 `_) +- bugfix: add ``**kwargs`` param to ``Resource`` constructor (`1527 `_) + +3.0.1 (2022-10-18) +------------------ + +- Updated ``django-import-export-ci.yml`` to fix node.js deprecation +- bugfix: ``DateTimeWidget.clean()`` handles tz aware datetime (`1499 `_) +- Updated translations for v3.0.0 release (`1500 `_) + +3.0.0 (2022-10-18) +------------------ + +Breaking changes 
+################ + +This release makes some minor changes to the public API. If you have overridden any methods from the ``resources`` or ``widgets`` modules, you may need to update your implementation to accommodate these changes. + +- Check value of ``ManyToManyField`` in ``skip_row()`` (`1271 `_) + - This fixes an issue where ManyToMany fields are not checked correctly in ``skip_row()``. This means that ``skip_row()`` now takes ``row`` as a mandatory arg. If you have overridden ``skip_row()`` in your own implementation, you will need to add ``row`` as an arg. + +- Bug fix: validation errors were being ignored when ``skip_unchanged`` is set (`1378 `_) + - If you have overridden ``skip_row()`` you can choose whether or not to skip rows if validation errors are present. The default behavior is to not to skip rows if there are validation errors during import. + +- Use 'create' flag instead of instance.pk (`1362 `_) + - ``import_export.resources.save_instance()`` now takes an additional mandatory argument: ``is_create``. If you have overridden ``save_instance()`` in your own code, you will need to add this new argument. + +- ``widgets``: Unused ``*args`` params have been removed from method definitions. (`1413 `_) + - If you have overridden ``clean()`` then you should update your method definition to reflect this change. + - ``widgets.ForeignKeyWidget`` / ``widgets.ManyToManyWidget``: The unused ``*args`` param has been removed from ``__init__()``. If you have overridden ``ForeignKeyWidget`` or ``ManyToManyWidget`` you may need to update your implementation to reflect this change. + +- Admin interface: Modified handling of import errors (`1306 `_) + - Exceptions raised during the import process are now presented as form errors, instead of being wrapped in a \ tag in the response. If you have any custom logic which uses the error written directly into the response, then this may need to be changed. 
+ +- ImportForm: improve compatibility with previous signature (`1434 `_) + - Previous ``ImportForm`` implementation was based on Django's ``forms.Form``, if you have any custom ImportForm you now need to inherit from ``import_export.forms.ImportExportFormBase``. + +- Allow custom ``change_list_template`` in admin views using mixins (`1483 `_) + - If you are using admin mixins from this library in conjunction with code that overrides ``change_list_template`` (typically admin mixins from other libraries such as django-admin-sortable2 or reversion), object tools in the admin change list views may render differently now. + - If you have created a custom template which extends any import_export template, then this may now cause a recursion error (see `1415 `_) + +- ``import.html``: Added blocks to import template (`1488 `_) + - If you have made customizations to the import template then you may need to refactor these after the addition of block declarations. + +Deprecations +############ + +This release adds some deprecations which will be removed in a future release. + +- Add support for multiple resources in ModelAdmin. (`1223 `_) + + - The ``*Mixin.resource_class`` accepting single resource has been deprecated and the new ``*Mixin.resource_classes`` accepting subscriptable type (list, tuple, ...) has been added. + + - Same applies to all of the ``get_resource_class``, ``get_import_resource_class`` and ``get_export_resource_class`` methods. 
+ +- Deprecated ``exceptions.py`` (`1372 `_) + +- Refactored form-related methods on ``ImportMixin`` / ``ExportMixin`` (`1147 `_) + + - The following are deprecated: + + - ``get_import_form()`` + + - ``get_confirm_import_form()`` + + - ``get_form_kwargs()`` + + - ``get_export_form()`` + +Enhancements +############ + +- Default format selections set correctly for export action (`1389 `_) +- Added option to store raw row values in each row's ``RowResult`` (`1393 `_) +- Add natural key support to ``ForeignKeyWidget`` (`1371 `_) +- Optimised default instantiation of ``CharWidget`` (`1414 `_) +- Allow custom ``change_list_template`` in admin views using mixins (`1483 `_) +- Added blocks to import template (`1488 `_) +- improve compatibility with previous ImportForm signature (`1434 `_) +- Refactored form-related methods on ``ImportMixin`` / ``ExportMixin`` (`1147 `_) +- Include custom form media in templates (`1038 `_) +- Remove unnecessary files generated when running tox locally (`1426 `_) + +Fixes +##### + +- Fixed Makefile coverage: added ``coverage combine`` +- Fixed handling of LF character when using ``CacheStorage`` (`1417 `_) +- bugfix: ``skip_row()`` handles M2M field when UUID pk used +- Fix broken link to tablib formats page (`1418 `_) +- Fix broken image ref in ``README.rst`` +- bugfix: ``skip_row()`` fix crash when model has m2m field and none is provided in upload (`1439 `_) +- Fix deprecation in example application: Added support for transitional form renderer (`1451 `_) + +Development +########### + +- Increased test coverage, refactored CI build to use tox (`1372 `_) + +Documentation +############# + +- Clarified issues around the usage of temporary storage (`1306 `_) + +2.9.0 (2022-09-14) +------------------ + +- Fix deprecation in example application: Added support for transitional form renderer (`1451 `_) +- Escape HTML output when rendering decoding errors (`1469 `_) +- Apply make_aware when the original file contains actual datetimes (`1478 `_) +- 
Automatically guess the format of the file when importing (`1460 `_) + +2.8.0 (2022-03-31) +------------------ + +- Updated import.css to support dark mode (`1318 `_) +- Fix crash when import_data() called with empty Dataset and ``collect_failed_rows=True`` (`1381 `_) +- Improve Korean translation (`1402 `_) +- Update example subclass widget code (`1407 `_) +- Drop support for python3.6, django 2.2, 3.0, 3.1 (`1408 `_) +- Add get_export_form() to ExportMixin (`1409 `_) + 2.7.1 (2021-12-23) ------------------ -- Removed `django_extensions` from example app settings (#1356) -- Added support for Django 4.0 (#1357) +- Removed ``django_extensions`` from example app settings (`1356 `_) +- Added support for Django 4.0 (`1357 `_) 2.7.0 (2021-12-07) ------------------ -- Big integer support for Integer widget (#788) -- Run compilemessages command to keep .mo files in sync (#1299) -- Added `skip_html_diff` meta attribute (#1329) -- Added python3.10 to tox and CI environment list (#1336) -- Add ability to rollback the import on validation error (#1339) -- Fix missing migration on example app (#1346) -- Fix crash when deleting via admin site (#1347) -- Use Github secret in CI script instead of hard-coded password (#1348) -- Documentation: correct error in example application which leads to crash (#1353) +- Big integer support for Integer widget (`788 `_) +- Run compilemessages command to keep .mo files in sync (`1299 `_) +- Added ``skip_html_diff`` meta attribute (`1329 `_) +- Added python3.10 to tox and CI environment list (`1336 `_) +- Add ability to rollback the import on validation error (`1339 `_) +- Fix missing migration on example app (`1346 `_) +- Fix crash when deleting via admin site (`1347 `_) +- Use Github secret in CI script instead of hard-coded password (`1348 `_) +- Documentation: correct error in example application which leads to crash (`1353 `_) 2.6.1 (2021-09-30) ------------------ -- Revert 'dark mode' css: causes issues in django2.2 (#1330) +- Revert 
'dark mode' css: causes issues in django2.2 (`1330 `_) 2.6.0 (2021-09-15) ------------------ -- Added guard for null 'options' to fix crash (#1325) -- Updated import.css to support dark mode (#1323) -- Fixed regression where overridden mixin methods are not called (#1315) -- Fix xls/xlsx import of Time fields (#1314) -- Added support for 'to_encoding' attribute (#1311) -- Removed travis and replaced with github actions for CI (#1307) -- Increased test coverage (#1286) -- Fix minor date formatting issue for date with years < 1000 (#1285) -- Translate the zh_Hans missing part (#1279) -- Remove code duplication from mixins.py and admin.py (#1277) -- Fix example in BooleanWidget docs (#1276) -- Better support for Django main (#1272) -- don't test Django main branch with python36,37 (#1269) -- Support Django 3.2 (#1265) -- Correct typo in Readme (#1258) -- Rephrase logical clauses in docstrings (#1255) -- Support multiple databases (#1254) -- Update django master to django main (#1251) -- Add Farsi translated messages in the locale (#1249) -- Update Russian translations (#1244) -- Append export admin action using ModelAdmin.get_actions (#1241) -- Fix minor mistake in makemigrations command (#1233) -- Remove EOL Python 3.5 from CI (#1228) -- CachedInstanceLoader defaults to empty when import_id is missing (#1225) -- Add kwargs to import_row, import_object and import_field (#1190) -- Call load_workbook() with data_only flag (#1095) +- Added guard for null 'options' to fix crash (`1325 `_) +- Updated import.css to support dark mode (`1323 `_) +- Fixed regression where overridden mixin methods are not called (`1315 `_) +- Fix xls/xlsx import of Time fields (`1314 `_) +- Added support for 'to_encoding' attribute (`1311 `_) +- Removed travis and replaced with github actions for CI (`1307 `_) +- Increased test coverage (`1286 `_) +- Fix minor date formatting issue for date with years < 1000 (`1285 `_) +- Translate the zh_Hans missing part (`1279 `_) +- Remove code duplication 
from mixins.py and admin.py (`1277 `_) +- Fix example in BooleanWidget docs (`1276 `_) +- Better support for Django main (`1272 `_) +- don't test Django main branch with python36,37 (`1269 `_) +- Support Django 3.2 (`1265 `_) +- Correct typo in Readme (`1258 `_) +- Rephrase logical clauses in docstrings (`1255 `_) +- Support multiple databases (`1254 `_) +- Update django master to django main (`1251 `_) +- Add Farsi translated messages in the locale (`1249 `_) +- Update Russian translations (`1244 `_) +- Append export admin action using ModelAdmin.get_actions (`1241 `_) +- Fix minor mistake in makemigrations command (`1233 `_) +- Remove EOL Python 3.5 from CI (`1228 `_) +- CachedInstanceLoader defaults to empty when import_id is missing (`1225 `_) +- Add kwargs to import_row, import_object and import_field (`1190 `_) +- Call load_workbook() with data_only flag (`1095 `_) 2.5.0 (2020-12-30) ------------------ -- Changed the default value for ``IMPORT_EXPORT_CHUNK_SIZE`` to 100. (#1196) -- Add translation for Korean (#1218) +- Changed the default value for ``IMPORT_EXPORT_CHUNK_SIZE`` to 100. (`1196 `_) +- Add translation for Korean (`1218 `_) - Update linting, CI, and docs. @@ -74,94 +629,93 @@ Changelog 2.3.0 (2020-07-12) ------------------ -- Add missing translation keys for all languages (#1144) -- Added missing Portuguese translations (#1145) -- Add kazakh translations (#1161) -- Add bulk operations (#1149) +- Add missing translation keys for all languages (`1144 `_) +- Added missing Portuguese translations (`1145 `_) +- Add kazakh translations (`1161 `_) +- Add bulk operations (`1149 `_) 2.2.0 (2020-06-01) ------------------ -- Deal with importing a BooleanField that actually has `True`, `False`, and - `None` values. 
(#1071) -- Add row_number parameter to before_import_row, after_import_row and after_import_instance (#1040) -- Paginate queryset if Queryset.prefetch_related is used (#1050) +- Deal with importing a BooleanField that actually has ``True``, ``False``, and + ``None`` values. (`1071 `_) +- Add row_number parameter to before_import_row, after_import_row and after_import_instance (`1040 `_) +- Paginate queryset if Queryset.prefetch_related is used (`1050 `_) 2.1.0 (2020-05-02) ------------------ -- Fix DurationWidget handling of zero value (#1117) - -- Make import diff view only show headers for user visible fields (#1109) +- Fix DurationWidget handling of zero value (`1117 `_) -- Make confirm_form accessible in get_import_resource_kwargs and get_import_data_kwargs (#994, #1108) +- Make import diff view only show headers for user visible fields (`1109 `_) -- Initialize Decimal with text value, fix #1035 (#1039) +- Make confirm_form accessible in get_import_resource_kwargs and get_import_data_kwargs (`994 `_, `1108 `_) -- Adds meta flag 'skip_diff' to enable skipping of diff operations (#1045) +- Initialize Decimal with text value, fix #1035 (`1039 `_) -- Update docs (#1097, #1114, #1122, #969, #1083, #1093) +- Adds meta flag 'skip_diff' to enable skipping of diff operations (`1045 `_) +- Update docs (`1097 `_, `1114 `_, `1122 `_, `969 `_, `1083 `_, `1093 `_) 2.0.2 (2020-02-16) ------------------ -- Add support for tablib >= 1.0 (#1061) +- Add support for tablib >= 1.0 (`1061 `_) - Add ability to install a subset of tablib supported formats and save some automatic dependency installations (needs tablib >= 1.0) -- Use column_name when checking row for fields (#1056) +- Use column_name when checking row for fields (`1056 `_) 2.0.1 (2020-01-15) ------------------ -- Fix deprecated Django 3.0 function usage (#1054) +- Fix deprecated Django 3.0 function usage (`1054 `_) -- Pin tablib version to not use new major version (#1063) +- Pin tablib version to not use new major 
version (`1063 `_) -- Format field is always shown on Django 2.2 (#1007) +- Format field is always shown on Django 2.2 (`1007 `_) 2.0 (2019-12-03) ---------------- - Removed support for Django < 2.0 - Removed support for Python < 3.5 -- feat: Support for Postgres JSONb Field (#904) +- feat: Support for Postgres JSONb Field (`904 `_) 1.2.0 (2019-01-10) ------------------ -- feat: Better surfacing of validation errors in UI / optional model instance validation (#852) +- feat: Better surfacing of validation errors in UI / optional model instance validation (`852 `_) -- chore: Use modern setuptools in setup.py (#862) +- chore: Use modern setuptools in setup.py (`862 `_) -- chore: Update URLs to use https:// (#863) +- chore: Update URLs to use https:// (`863 `_) - chore: remove outdated workarounds - chore: Run SQLite tests with in-memory database -- fix: Change logging level (#832) +- fix: Change logging level (`832 `_) -- fix: Changed `get_instance()` return val (#842) +- fix: Changed ``get_instance()`` return val (`842 `_) 1.1.0 (2018-10-02) ------------------ -- fix: Django2.1 ImportExportModelAdmin export (#797) (#819) +- fix: Django2.1 ImportExportModelAdmin export (`797 `_, `819 `_) - setup: add django2.1 to test matrix -- JSONWidget for jsonb fields (#803) +- JSONWidget for jsonb fields (`803 `_) -- Add ExportActionMixin (#809) +- Add ExportActionMixin (`809 `_) -- Add Import Export Permissioning #608 (#804) +- Add Import Export Permissioning #608 (`804 `_) -- write_to_tmp_storage() for import_action() (#781) +- write_to_tmp_storage() for import_action() (`781 `_) -- follow relationships on ForeignKeyWidget #798 +- follow relationships on ForeignKeyWidget (`798 `_) - Update all pypi.python.org URLs to pypi.org @@ -169,198 +723,198 @@ Changelog - added unicode support for TSV for python 2 -- Added ExportViewMixin (#692) +- Added ExportViewMixin (`692 `_) 1.0.1 (2018-05-17) ------------------ -- Make deep copy of fileds from class attr to instance attr (#550) +- 
Make deep copy of fields from class attr to instance attr (`550 `_) -- Fix #612: NumberWidget.is_empty() should strip the value if string type (#613) +- Fix #612: NumberWidget.is_empty() should strip the value if string type (`613 `_) -- Fix #713: last day isn't included in results qs (#779) +- Fix #713: last day isn't included in results qs (`779 `_) -- use Python3 compatible MySql driver in development (#706) +- use Python3 compatible MySql driver in development (`706 `_) -- fix: warning U mode is deprecated in python 3 (#776) +- fix: warning U mode is deprecated in python 3 (`776 `_) -- refactor: easier overridding widgets and default field (#769) +- refactor: easier overriding widgets and default field (`769 `_) -- Updated documentation regardign declaring fields (#735) +- Updated documentation regarding declaring fields (`735 `_) -- custom js for action form also handles grappelli (#719) +- custom js for action form also handles grappelli (`719 `_) -- Use 'verbose_name' in breadcrumbs to match Django default (#732) +- Use 'verbose_name' in breadcrumbs to match Django default (`732 `_) -- Add Resource.get_diff_class() (#745) +- Add Resource.get_diff_class() (`745 `_) -- Fix and add polish translation (#747) +- Fix and add polish translation (`747 `_) -- Restore raise_errors to before_import (#749) +- Restore raise_errors to before_import (`749 `_) 1.0.0 (2018-02-13) ------------------ -- Switch to semver versioning (#687) +- Switch to semver versioning (`687 `_) -- Require Django>=1.8 (#685) +- Require Django>=1.8 (`685 `_) -- upgrade tox configuration (#737) +- upgrade tox configuration (`737 `_) 0.7.0 (2018-01-17) ------------------ -- skip_row override example (#702) +- skip_row override example (`702 `_) -- Testing against Django 2.0 should not fail (#709) +- Testing against Django 2.0 should not fail (`709 `_) -- Refactor transaction handling (#690) +- Refactor transaction handling (`690 `_) -- Resolves #703 fields shadowed (#703) +- Resolves #703 fields 
shadowed (`703 `_) -- discourage installation as a zipped egg (#548) +- discourage installation as a zipped egg (`548 `_) -- Fixed middleware settings in test app for Django 2.x (#696) +- Fixed middleware settings in test app for Django 2.x (`696 `_) 0.6.1 (2017-12-04) ------------------ -- Refactors and optimizations (#686, #632, #684, #636, #631, #629, #635, #683) +- Refactors and optimizations (`686 `_, `632 `_, `684 `_, `636 `_, `631 `_, `629 `_, `635 `_, `683 `_) -- Travis tests for Django 2.0.x (#691) +- Travis tests for Django 2.0.x (`691 `_) 0.6.0 (2017-11-23) ------------------ -- Refactor import_row call by using keyword arguments (#585) +- Refactor import_row call by using keyword arguments (`585 `_) -- Added {{ block.super }} call in block bodyclass in admin/base_site.html (#582) +- Added {{ block.super }} call in block bodyclass in admin/base_site.html (`582 `_) -- Add support for the Django DurationField with DurationWidget (#575) +- Add support for the Django DurationField with DurationWidget (`575 `_) -- GitHub bmihelac -> django-import-export Account Update (#574) +- GitHub bmihelac -> django-import-export Account Update (`574 `_) -- Add intersphinx links to documentation (#572) +- Add intersphinx links to documentation (`572 `_) -- Add Resource.get_import_fields() (#569) +- Add Resource.get_import_fields() (`569 `_) -- Fixed readme mistake (#568) +- Fixed readme mistake (`568 `_) -- Bugfix/fix m2m widget clean (#515) +- Bugfix/fix m2m widget clean (`515 `_) -- Allow injection of context data for template rendered by import_action() and export_action() (#544) +- Allow injection of context data for template rendered by import_action() and export_action() (`544 `_) -- Bugfix/fix exception in generate_log_entries() (#543) +- Bugfix/fix exception in generate_log_entries() (`543 `_) -- Process import dataset and result in separate methods (#542) +- Process import dataset and result in separate methods (`542 `_) -- Bugfix/fix error in converting 
exceptions to strings (#526) +- Bugfix/fix error in converting exceptions to strings (`526 `_) -- Fix admin integration tests for the new "Import finished..." message, update Czech translations to 100% coverage. (#596) +- Fix admin integration tests for the new "Import finished..." message, update Czech translations to 100% coverage. (`596 `_) -- Make import form type easier to override (#604) +- Make import form type easier to override (`604 `_) -- Add saves_null_values attribute to Field to control whether null values are saved on the object (#611) +- Add saves_null_values attribute to Field to control whether null values are saved on the object (`611 `_) -- Add Bulgarian translations (#656) +- Add Bulgarian translations (`656 `_) -- Add django 1.11 to TravisCI (#621) +- Add django 1.11 to TravisCI (`621 `_) -- Make Signals code example format correctly in documentation (#553) +- Make Signals code example format correctly in documentation (`553 `_) -- Add Django as requirement to setup.py (#634) +- Add Django as requirement to setup.py (`634 `_) -- Update import of reverse for django 2.x (#620) +- Update import of reverse for django 2.x (`620 `_) -- Add Django-version classifiers to setup.py’s CLASSIFIERS (#616) +- Add Django-version classifiers to setup.py’s CLASSIFIERS (`616 `_) -- Some fixes for Django 2.0 (#672) +- Some fixes for Django 2.0 (`672 `_) -- Strip whitespace when looking up ManyToMany fields (#668) +- Strip whitespace when looking up ManyToMany fields (`668 `_) -- Fix all ResourceWarnings during tests in Python 3.x (#637) +- Fix all ResourceWarnings during tests in Python 3.x (`637 `_) -- Remove downloads count badge from README since shields.io no longer supports it for PyPi (#677) +- Remove downloads count badge from README since shields.io no longer supports it for PyPi (`677 `_) -- Add coveralls support and README badge (#678) +- Add coveralls support and README badge (`678 `_) 0.5.1 (2016-09-29) ------------------ -- French locale not in pypi 
(#524) +- French locale not in pypi (`524 `_) -- Bugfix/fix undefined template variables (#519) +- Bugfix/fix undefined template variables (`519 `_) 0.5.0 (2016-09-01) ------------------ -- Hide default value in diff when importing a new instance (#458) +- Hide default value in diff when importing a new instance (`458 `_) -- Append rows to Result object via function call to allow overriding (#462) +- Append rows to Result object via function call to allow overriding (`462 `_) -- Add get_resource_kwargs to allow passing request to resource (#457) +- Add get_resource_kwargs to allow passing request to resource (`457 `_) -- Expose Django user to get_export_data() and export() (#447) +- Expose Django user to get_export_data() and export() (`447 `_) -- Add before_export and after_export hooks (#449) +- Add before_export and after_export hooks (`449 `_) -- fire events post_import, post_export events (#440) +- fire events post_import, post_export events (`440 `_) - add **kwargs to export_data / create_dataset -- Add before_import_row() and after_import_row() (#452) +- Add before_import_row() and after_import_row() (`452 `_) -- Add get_export_fields() to Resource to control what fields are exported (#461) +- Add get_export_fields() to Resource to control what fields are exported (`461 `_) -- Control user-visible fields (#466) +- Control user-visible fields (`466 `_) - Fix diff for models using ManyRelatedManager -- Handle already cleaned objects (#484) +- Handle already cleaned objects (`484 `_) -- Add after_import_instance hook (#489) +- Add after_import_instance hook (`489 `_) -- Use optimized xlsx reader (#482) +- Use optimized xlsx reader (`482 `_) -- Adds resource_class of BookResource (re-adds) in admin docs (#481) +- Adds resource_class of BookResource (re-adds) in admin docs (`481 `_) -- Require POST method for process_import() (#478) +- Require POST method for process_import() (`478 `_) -- Add SimpleArrayWidget to support use of 
django.contrib.postgres.fields.ArrayField (#472) +- Add SimpleArrayWidget to support use of django.contrib.postgres.fields.ArrayField (`472 `_) -- Add new Diff class (#477) +- Add new Diff class (`477 `_) -- Fix #375: add row to widget.clean(), obj to widget.render() (#479) +- Fix #375: add row to widget.clean(), obj to widget.render() (`479 `_) -- Restore transactions for data import (#480) +- Restore transactions for data import (`480 `_) -- Refactor the import-export templates (#496) +- Refactor the import-export templates (`496 `_) -- Update doc links to the stable version, update rtfd to .io (#507) +- Update doc links to the stable version, update rtfd to .io (`507 `_) -- Fixed typo in the Czech translation (#495) +- Fixed typo in the Czech translation (`495 `_) 0.4.5 (2016-04-06) ------------------ -- Add FloatWidget, use with model fields models.FloatField (#433) +- Add FloatWidget, use with model fields models.FloatField (`433 `_) -- Fix default values in fields (#431, #364) +- Fix default values in fields (`431 `_, `364 `_) - Field constructor `default` argument is NOT_PROVIDED instead of None - Field clean method checks value against `Field.empty_values` [None, ''] + Field constructor ``default`` argument is NOT_PROVIDED instead of None + Field clean method checks value against ``Field.empty_values`` [None, ''] 0.4.4 (2016-03-22) ------------------ -- FIX: No static/ when installed via pip #427 +- FIX: No static/ when installed via pip (`427 `_) - Add total # of imports and total # of updates to import success msg @@ -368,15 +922,15 @@ Changelog 0.4.3 (2016-03-08) ------------------ -- fix MediaStorage does not respect the read_mode parameter (#416) +- fix MediaStorage does not respect the read_mode parameter (`416 `_) -- Reset SQL sequences when new objects are imported (#59) +- Reset SQL sequences when new objects are imported (`59 `_) -- Let Resource rollback if import throws exception (#377) +- Let Resource rollback if import throws exception (`377 
`_) -- Fixes error when a single value is stored in m2m relation field (#177) +- Fixes error when a single value is stored in m2m relation field (`177 `_) -- Add support for django.db.models.TimeField (#381) +- Add support for django.db.models.TimeField (`381 `_) 0.4.2 (2015-12-18) @@ -388,7 +942,7 @@ Changelog 0.4.1 (2015-12-11) ------------------ -- fix for fields with a dyanmic default callable (#360) +- fix for fields with a dynamic default callable (`360 `_) 0.4.0 (2015-12-02) @@ -396,7 +950,7 @@ Changelog - Add Django 1.9 support -- Django 1.4 is not supported (#348) +- Django 1.4 is not supported (`348 `_) 0.3.1 (2015-11-20) @@ -408,33 +962,33 @@ Changelog 0.3 (2015-11-20) ---------------- -- FIX: importing csv UnicodeEncodeError introduced in 0.2.9 (#347) +- FIX: importing csv UnicodeEncodeError introduced in 0.2.9 (`347 `_) 0.2.9 (2015-11-12) ------------------ -- Allow Field.save() relation following (#344) +- Allow Field.save() relation following (`344 `_) -- Support default values on fields (and models) (#345) +- Support default values on fields (and models) (`345 `_) -- m2m widget: allow trailing comma (#343) +- m2m widget: allow trailing comma (`343 `_) -- Open csv files as text and not binary (#127) +- Open csv files as text and not binary (`127 `_) 0.2.8 (2015-07-29) ------------------ -- use the IntegerWidget for database-fields of type BigIntegerField (#302) +- use the IntegerWidget for database-fields of type BigIntegerField (`302 `_) -- make datetime timezone aware if USE_TZ is True (#283). +- make datetime timezone aware if USE_TZ is True (`283 `_).
-- Fix 0 is interpreted as None in number widgets (#274) +- Fix 0 is interpreted as None in number widgets (`274 `_) -- add possibility to override tmp storage class (#133, #251) +- add possibility to override tmp storage class (`133 `_, `251 `_) -- better error reporting (#259) +- better error reporting (`259 `_) 0.2.7 (2015-05-04) @@ -442,34 +996,34 @@ Changelog - Django 1.8 compatibility -- add attribute inheritance to Resource (#140) +- add attribute inheritance to Resource (`140 `_) -- make the filename and user available to import_data (#237) +- make the filename and user available to import_data (`237 `_) -- Add to_encoding functionality (#244) +- Add to_encoding functionality (`244 `_) -- Call before_import before creating the instance_loader - fixes #193 +- Call before_import before creating the instance_loader - fixes (`193 `_) 0.2.6 (2014-10-09) ------------------ -- added use of get_diff_headers method into import.html template (#158) +- added use of get_diff_headers method into import.html template (`158 `_) - Try to use OrderedDict instead of SortedDict, which is deprecated in - Django 1.7 (#157) + Django 1.7 (`157 `_) - fixed #105 unicode import -- remove invalid form action "form_url" #154 +- remove invalid form action "form_url" (`154 `_) 0.2.5 (2014-10-04) ------------------ -- Do not convert numeric types to string (#149) +- Do not convert numeric types to string (`149 `_) -- implement export as an admin action (#124) +- implement export as an admin action (`124 `_) 0.2.4 (2014-09-18) @@ -486,7 +1040,7 @@ Changelog - Improve error messages -- FIX: Properly handle NullBoleanField (#115) - Backward Incompatible Change +- FIX: Properly handle NullBoleanField (`115 `_) - Backward Incompatible Change previously None values were handled as false @@ -495,7 +1049,7 @@ Changelog - Add separator and field keyword arguments to ManyToManyWidget -- FIX: No support for dates before 1900 (#93) +- FIX: No support for dates before 1900 (`93 `_) 0.2.2 (2014-04-18) 
@@ -509,7 +1063,7 @@ Changelog 0.2.1 (2014-02-20) ------------------ -- FIX import_file_name form field can be use to access the filesystem (#65) +- FIX import_file_name form field can be use to access the filesystem (`65 `_) 0.2.0 (2014-01-30) @@ -521,29 +1075,29 @@ Changelog 0.1.6 (2014-01-21) ------------------ -* Additional hooks for customizing the workflow (#61) +* Additional hooks for customizing the workflow (`61 `_) 0.1.5 (2013-11-29) ------------------ -* Prevent queryset caching when exporting (#44) +* Prevent queryset caching when exporting (`44 `_) -* Allow unchanged rows to be skipped when importing (#30) +* Allow unchanged rows to be skipped when importing (`30 `_) -* Update tests for Django 1.6 (#57) +* Update tests for Django 1.6 (`57 `_) * Allow different ``ResourceClass`` to be used in ``ImportExportModelAdmin`` - (#49) + (`49 `_) 0.1.4 ----- -* Use `field_name` instead of `column_name` for field dehydration, FIX #36 +* Use ``field_name`` instead of ``column_name`` for field dehydration, FIX (`36 `_) -* Handle OneToOneField, FIX #17 - Exception when attempting access something +* Handle OneToOneField, FIX (`17 `_) - Exception when attempting access something on the related_name. -* FIX #23 - export filter not working +* export filter not working (`23 `_) 0.1.3 ----- diff --git a/docs/conf.py b/docs/conf.py index f753f30bf..f5700b1c5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,91 +1,55 @@ import os import sys +from importlib.metadata import version + +import django # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.append(os.path.abspath('.')) -sys.path.append(os.path.abspath('..')) -sys.path.append(os.path.abspath('../tests')) -os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' - -import django +sys.path.append(os.path.abspath(".")) +sys.path.append(os.path.abspath("..")) +sys.path.append(os.path.abspath("../tests")) +os.environ["DJANGO_SETTINGS_MODULE"] = "settings" django.setup() # -- General configuration ----------------------------------------------------- -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.autosectionlabel", +] + +autoclass_content = "both" + +autosectionlabel_prefix_document = True # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'django-import-export' -copyright = '2012–2020, Bojan Mihelac' +project = "django-import-export" +copyright = "2012–2024, Bojan Mihelac and others." # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -# -try: - from import_export import __version__ - - # The short X.Y version. - version = '.'.join(__version__.split('.')[:2]) - # The full version, including alpha/beta/rc tags. - release = __version__ -except ImportError: - version = release = 'dev' - -# The language for content autogenerated by Sphinx. 
Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False +release = version("django-import-export") +# for example take major/minor +version = ".".join(release.split(".")[:2]) # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - +pygments_style = "sphinx" # -- Options for HTML output --------------------------------------------------- @@ -93,145 +57,63 @@ # a list of builtin themes. html_theme = "sphinx_rtd_theme" -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). 
-#html_file_suffix = None +html_static_path = ["_static"] # Output file base name for HTML help builder. -htmlhelp_basename = 'django-import-export' - +htmlhelp_basename = "django-import-export" # -- Options for LaTeX output -------------------------------------------------- -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' - # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'django-import-export.tex', 'django-import-export Documentation', - 'Bojan Mihelac', 'manual'), + ( + "index", + "django-import-export.tex", + "django-import-export Documentation", + "Bojan Mihelac", + "manual", + ), ] -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Additional stuff for the LaTeX preamble. -#latex_preamble = '' - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'django-import-export', 'django-import-export Documentation', - ['Bojan Mihelac'], 1) + ( + "index", + "django-import-export", + "django-import-export Documentation", + ["Bojan Mihelac"], + 1, + ) ] -# If true, show URL addresses after external links. 
-#man_show_urls = False - # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'django-import-export', 'django-import-export Documentation', 'Bojan Mihelac', - 'django-import-export', 'Import/export data for Django', 'Miscellaneous'), + ( + "index", + "django-import-export", + "django-import-export Documentation", + "Bojan Mihelac", + "django-import-export", + "Import/export data for Django", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. texinfo_appendices = [] # intersphinx documentation -intersphinx_mapping = { - 'tablib': ('https://tablib.readthedocs.io/en/stable/', None) -} +intersphinx_mapping = {"tablib": ("https://tablib.readthedocs.io/en/stable/", None)} + +exclude_patterns = ["image_src"] diff --git a/docs/contributing.rst b/docs/contributing.rst new file mode 100644 index 000000000..c8077fa02 --- /dev/null +++ b/docs/contributing.rst @@ -0,0 +1,185 @@ +.. _contributing: + +############ +Contributing +############ + +django-import-export is open-source and, as such, grows (or shrinks) & improves in part +due to the community. Below are some guidelines on how to help with the project. + +By contributing you agree to abide by the +`Code of Conduct `_. + + +Philosophy +---------- + +* django-import-export is BSD-licensed. All contributed code must be either + + * the original work of the author, contributed under the BSD, or... + + * work taken from another project released under a BSD-compatible license. + +* GPL'd (or similar) works are not eligible for inclusion. + +* django-import-export's git main branch should always be stable, production-ready & passing all tests. + +.. 
_question: + +Questions +--------- + +Please check the :ref:`common issues ` section of the :doc:`FAQ ` to see if your question already has an answer. + +For general questions about usage, we recommend posting to Stack Overflow, using the +`django-import-export `_ tag. Please search existing +answers to see if any match your problem. If not, post a new question including as much relevant detail as you can. +See `how to ask `_ for more details. + +For questions about the internals of the library, please raise an +`issue `_ and use the 'question' workflow. + +* First check to see if there is an existing issue which answers your question. + +* Remember to include as much detail as you can so that your question is answered in a timely manner. + +Guidelines For Reporting An Issue/Feature +----------------------------------------- + +So you've found a bug or have a great idea for a feature. Here are the steps you should take to help get it +added/fixed in django-import-export: + +* First, check to see if there's an existing + `issue `_ or + `pull request `_ for the bug/feature. + +* If there isn't one there, please file an issue. The ideal report includes: + + * A description of the problem/suggestion. + + * How to recreate the bug. + + * If relevant, including the versions of your: + + * Python interpreter + + * Django + + * tablib version + + * django-import-export + + * Optionally any of the other dependencies involved + + * Ideally, creating a pull request with a (failing) test case demonstrating what's wrong. This makes it easy for us + to reproduce and fix the problem. + +Guidelines For Contributing Code +-------------------------------- + +If you're ready to take the plunge and contribute back some code or documentation please consider the following: + +* Search existing issues and PRs to see if there are already any similar proposals. 
+ +* For substantial changes, we recommend raising a question_ first so that we can offer any advice or pointers based on + previous experience. + +The process should look like: + +* Fork the project on GitHub into your own account. + +* Clone your copy of django-import-export. + +* Make a new branch in git & commit your changes there. + +* Push your new branch up to GitHub. + +* Again, ensure there isn't already an issue or pull request out there on it. + + * If there is and you feel you have a better fix, please take note of the issue number and mention it in your pull + request. + +* Create a new pull request (based on your branch), including what the problem/feature is, versions of your software + and referencing any related issues/pull requests. + +* We recommend setting up your editor to automatically indicate non-conforming styles (see `Development`_). + +In order to be merged into django-import-export, contributions must have the following: + +* A solid patch that: + + * is clear. + + * works across all supported versions of Python/Django. + + * follows the existing style of the code base (mostly PEP-8). + + * comments included as needed to explain why the code functions as it does + +* A test case that demonstrates the previous flaw that now passes with the included patch. + +* If it adds/changes a public API, it must also include documentation for those changes. + +* Must be appropriately licensed (see `Philosophy`_). + +* Adds yourself to the `AUTHORS`_ file. + +If your contribution lacks any of these things, they will have to be added by a core contributor before being merged +into django-import-export proper, which may take substantial time for the all-volunteer team to get to. + +.. _`AUTHORS`: https://github.com/django-import-export/django-import-export/blob/main/AUTHORS + +Development +----------- + +Formatting +^^^^^^^^^^ + +* All files should be formatted using the black auto-formatter. This will be run by pre-commit if configured. 
+ +* The project repository includes an ``.editorconfig`` file. We recommend using a text editor with EditorConfig support + to avoid indentation and whitespace issues. + +* We allow up to 88 characters as this is the line length used by black. This check is included when you run flake8. + Documentation, comments, and docstrings should be wrapped at 79 characters, even though PEP 8 suggests 72. + +* To install pre-commit:: + + python -m pip install pre-commit + + Then run:: + + pre-commit install + +* If using ``git blame``, you can ignore commits which made large changes to the code base, such as reformatting. + Run this command from the base project directory:: + + git config blame.ignoreRevsFile .git-blame-ignore-revs + +.. _create_venv: + +Create virtual environment +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Once you have cloned and checked out the repository, you can install a new development environment as follows:: + + python -m venv django-import-export-venv + source django-import-export-venv/bin/activate + pip install .[tests] + +Run tests +^^^^^^^^^ + +You can run the test suite with:: + + make clean test + +Build documentation +^^^^^^^^^^^^^^^^^^^ + +To build a local version of the documentation:: + + pip install -r requirements/docs.txt + make build-html-doc + +The documentation will be present in ``docs/_build/html/index.html``. diff --git a/docs/export_workflow.rst b/docs/export_workflow.rst new file mode 100644 index 000000000..1103f6d92 --- /dev/null +++ b/docs/export_workflow.rst @@ -0,0 +1,57 @@ +==================== +Export workflow +==================== + +This document describes the export data workflow in detail, with hooks that enable +customization of the export process. + +Methods highlighted in yellow in the sequence diagram indicate public methods which can +be overridden. + +.. 
image:: _static/images/export_workflow.svg
+   :alt: Export workflow sequence diagram
+   :scale: 75%
+
+The :meth:`~import_export.resources.Resource.export` method retrieves a ``QuerySet``
+from the database and formats into a :class:`tablib.Dataset`.
+
+Various hook methods are defined to allow you to customize the export data.
+
+This is what happens when the method is invoked:
+
+#. The :meth:`~import_export.resources.Resource.export` method is passed an optional
+   queryset parameter. The ``kwargs`` dict can hold additional information used to
+   create the export, for example if called from the Admin UI.
+
+#. The :meth:`~import_export.resources.Resource.before_export` hook is called.
+
+#. If no ``QuerySet`` has been passed, then
+   :meth:`~import_export.resources.Resource.get_queryset` method is called.
+
+#. The :meth:`~import_export.resources.Resource.filter_export` hook is called.
+   You can override this method to modify the queryset for export.
+
+#. For each instance in the ``QuerySet``,
+   :meth:`~import_export.resources.Resource.export_resource` is called (with the
+   instance passed as a parameter).
+
+#. For each field defined in :attr:`~import_export.options.ResourceOptions.fields`:
+
+   * :meth:`~import_export.resources.Resource.export_field` is called with the field and
+     instance as parameters.
+
+   * If a :ref:`dehydrate` method is defined on
+     the ``Resource``, then this method is called to extract the field value.
+     Otherwise :meth:`~import_export.fields.Field.export` is called for each defined
+     field, with the instance passed as a parameter.
+
+   * :meth:`~import_export.fields.Field.get_value` is called with the instance to
+     retrieve the export value from the instance.
+
+   * The field's widget :meth:`~import_export.widgets.Widget.render` method is called
+     to retrieve the export value.
+
+#. Each value is appended to a :class:`tablib.Dataset`.
+
+#. The :class:`tablib.Dataset` is returned from
+   :meth:`~import_export.resources.Resource.export`.
diff --git a/docs/faq.rst b/docs/faq.rst new file mode 100755 index 000000000..b2799f7e8 --- /dev/null +++ b/docs/faq.rst @@ -0,0 +1,281 @@ +========================== +Frequently Asked Questions +========================== + +What's the best way to communicate a problem, question, or suggestion? +====================================================================== + +To submit a feature, to report a bug, or to ask a question, please refer our +:doc:`contributing guidelines `. + +How can I help? +=============== + +We welcome contributions from the community. + +You can help in the following ways: + +* Reporting bugs or issues. + +* Answering questions which arise on `Stack Overflow `_ or as Github issues. + +* Providing translations for UI text. + +* Suggesting features or changes. + +We encourage you to read the :doc:`contributing guidelines `. + +.. _common_issues: + +Common issues +============= + +.. _import_id_fields_error_on_import: + +``import_id_fields`` error on import +------------------------------------ + +The following error message can be seen on import: + + *The following fields are declared in 'import_id_fields' but are not present in the resource* + +This indicates that the Resource has not been configured correctly, and the import logic fails. Specifically, +the import process is attempting to use either the defined or default values for +:attr:`~import_export.options.ResourceOptions.import_id_fields` and no matching field has been detected in the resource +fields. See :ref:`advanced_usage:Create or update model instances`. + +In cases where you are deliberately using generated fields in ``import_id_fields`` and these fields are not present in +the dataset, then you need to modify the resource definition to accommodate this. +See :ref:`dynamic_fields`. 
+
+How to handle double-save from Signals
+--------------------------------------
+
+This issue can apply if you have implemented post-save :ref:`advanced_usage:signals`, and you are using the import workflow in the Admin
+interface. You will find that the post-save signal is called twice for each instance. The reason for this is that
+the model ``save()`` method is called twice: once for the 'confirm' step and once for the 'import' step. The call
+to ``save()`` during the 'confirm' step is necessary to prove that the object will be saved successfully, or to
+report any exceptions in the Admin UI if save failed. After the 'confirm' step, the database transaction is rolled
+back so that no changes are persisted.
+
+Therefore there is no way at present to stop ``save()`` being called twice, and there will always be two signal calls.
+There is a workaround, which is to set a temporary flag on the instance being saved::
+
+    class BookResource(resources.ModelResource):
+
+        def before_save_instance(self, instance, row, **kwargs):
+            # during 'confirm' step, dry_run is True
+            instance.dry_run = kwargs.get("dry_run", False)
+
+        class Meta:
+            model = Book
+            fields = ('id', 'name')
+
+Your signal receiver can then include conditional logic to handle this flag::
+
+    @receiver(post_save, sender=Book)
+    def my_callback(sender, **kwargs):
+        instance = kwargs["instance"]
+        if getattr(instance, "dry_run", False):
+            # no-op if this is the 'confirm' step
+            return
+        else:
+            # your custom logic here
+            # this will be executed only on the 'import' step
+            pass
+
+Further discussion `here `_
+and `here `_.
+
+How to dynamically set resource values
+--------------------------------------
+
+There can be use cases where you need a runtime or user supplied value to be passed to a Resource.
+See :ref:`dynamically_set_resource_values`.
+
+How to set a value on all imported instances prior to persisting
+----------------------------------------------------------------
+
+If you need to set the same value on each instance created during import then refer to
+:ref:`advanced_usage:How to set a value on all imported instances prior to persisting`.
+
+How to export from more than one table
+--------------------------------------
+
+In the usual configuration, a ``Resource`` maps to a single model. If you want to export data associated with
+relations to that model, then these values can be defined in the ``fields`` declaration.
+See :ref:`advanced_usage:Model relations`.
+
+How to import imagefield in excel cell
+--------------------------------------
+
+Please refer to `this issue `_.
+
+How to hide stack trace in UI error messages
+--------------------------------------------
+
+Please refer to :ref:`format_ui_error_messages`.
+
+Ids incremented twice during import
+-----------------------------------
+
+When importing using the Admin site, it can be that the ids of the imported instances are different from those shown
+in the preview step. This occurs because the rows are imported during 'confirm', and then the transaction is rolled
+back prior to the confirm step. Database implementations mean that sequence numbers may not be reused.
+
+Consider enabling :ref:`import_export_skip_admin_confirm` as a workaround.
+
+See `this issue `_ for more detailed
+discussion.
+
+Not Null constraint fails when importing blank CharField
+--------------------------------------------------------
+
+This was an issue in v3 which is resolved in v4. The issue arises when importing from Excel because empty cells
+are converted to ``None`` during import. If the import process attempted to save a null value then a 'NOT NULL'
+exception was raised.
+
+In v4, initialization checks to see if the Django ``CharField`` has
+`blank `_ set to ``True``.
+If it does, then null values or empty strings are persisted as empty strings.
+
+If it is necessary to persist ``None`` instead of an empty string, then the ``allow_blank`` widget parameter can be
+set::
+
+    class BookResource(resources.ModelResource):
+
+        name = Field(widget=CharWidget(allow_blank=False))
+
+        class Meta:
+            model = Book
+
+See `this issue `_.
+
+Foreign key is null when importing
+----------------------------------
+
+It is possible to reference model relations by defining a field with the double underscore syntax. For example::
+
+    fields = ("author__name",)
+
+This means that during export, the relation will be followed and the referenced field will be added correctly to the
+export.
+
+This does not work during import because the reference may not be enough to identify the correct relation instance.
+:class:`~import_export.widgets.ForeignKeyWidget` should be used during import. See the documentation explaining
+:ref:`advanced_usage:Foreign Key relations`.
+
+How to customize export data
+----------------------------
+
+See the following responses on StackOverflow:
+
+  * https://stackoverflow.com/a/55046474/39296
+  * https://stackoverflow.com/questions/74802453/export-only-the-data-registered-by-the-user-django-import-export
+
+How to set export file encoding
+-------------------------------
+
+If export produces garbled or unexpected output, you may need to set the export encoding.
+See `this issue `_.
+
+How to create relation during import if it does not exist
+---------------------------------------------------------
+
+See :ref:`creating-non-existent-relations`.
+
+How to handle large file imports
+--------------------------------
+
+If uploading large files, you may encounter time-outs.
+See :ref:`Using celery` and :ref:`bulk_import:Bulk imports`.
+
+Performance issues or unexpected behavior during import
+-------------------------------------------------------
+
+This could be due to hidden rows in Excel files.
+Hidden rows can be excluded using :ref:`import_export_import_ignore_blank_lines`.
+ +Refer to `this PR `_ for more information. + + +How to use field other than `id` in Foreign Key lookup +------------------------------------------------------ + +See :ref:`advanced_usage:Foreign key relations`. + +``RelatedObjectDoesNotExist`` exception during import +----------------------------------------------------- + +This can occur if a model defines a ``__str__()`` method which references a primary key or +foreign key relation, and which is ``None`` during import. There is a workaround to deal +with this issue. Refer to `this comment `_. + +'failed to assign change_list_template attribute' warning in logs +----------------------------------------------------------------- + +This indicates that the change_list_template attribute could not be set, most likely due to a clash with a third party +library. Refer to :ref:`interoperability`. + +How to skip rows with validation errors during import +----------------------------------------------------- + +Refer to `this comment `_. + +``FileNotFoundError`` during Admin import 'confirm' step +-------------------------------------------------------- + +You may receive an error during import such as:: + + FileNotFoundError [Errno 2] No such file or directory: '/tmp/tmp5abcdef' + +This usually happens because you are running the Admin site in a multi server or container environment. +During import, the import file has to be stored temporarily and then retrieved for storage after confirmation. +Therefore ``FileNotFoundError`` error can occur because the temp storage is not available to the server process after +confirmation. + +To resolve this, you should avoid using temporary file system storage in multi server environments. + +Refer to :ref:`import_confirmation` for more information. + +How to export large datasets +---------------------------- + +Large datasets can be exported in a number of ways, depending on data size and preferences. + +#. 
You can write custom scripts or `Admin commands `_ + to handle the export. Output can be written to a local filesystem, cloud bucket, network storage etc. + Refer to the documentation on exporting :ref:`programmatically`. +#. You can use the third party library :doc:`django-import-export-celery ` to handle long-running exports. +#. You can enable :ref:`export via admin action` and then select items for export page by page + in the Admin UI. This will work if you have a relatively small number of pages and can handle export to multiple + files. This method is suitable as a one-off or as a simple way to export large datasets via the Admin UI. + +How to change column names on export +------------------------------------ + +If you want to modify the names of the columns on export, you can do so by overriding +:meth:`~import_export.resources.Resource.get_export_headers`:: + + class BookResource(ModelResource): + + def get_export_headers(self, fields=None): + headers = super().get_export_headers(fields=fields) + for i, h in enumerate(headers): + if h == 'name': + headers[i] = "NEW COLUMN NAME" + return headers + + class Meta: + model = Book + +How to configure logging +------------------------ + +Refer to :ref:`logging configuration` for more information. + +Export to Excel gives ``IllegalCharacterError`` +----------------------------------------------- + +This occurs when your data contains a character which cannot be rendered in Excel. +You can configure import-export to :ref:`sanitize these characters`. diff --git a/docs/getting_started.rst b/docs/getting_started.rst index d95c30dc8..5c4c610d1 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -2,10 +2,19 @@ Getting started =============== +Introduction +============ + +This section describes how to get started with import-export. We'll use the :ref:`example application` +as a guide. 
+ +import-export can be used programmatically as described here, or it can be integrated with the +:ref:`Django Admin interface`. + Test data ========= -There are test data files which can be used for importing in the `test/core/exports` directory. +There are sample files which can be used to test importing data in the `tests/core/exports` directory. The test models =============== @@ -44,10 +53,10 @@ For example purposes, we'll use a simplified book app. Here is our .. _base-modelresource: -Creating import-export resource +Creating a resource =============================== -To integrate `django-import-export` with our ``Book`` model, we will create a +To integrate import-export with our ``Book`` model, we will create a :class:`~import_export.resources.ModelResource` class in ``admin.py`` that will describe how this resource can be imported or exported:: @@ -59,181 +68,7 @@ describe how this resource can be imported or exported:: class BookResource(resources.ModelResource): class Meta: - model = Book - -Exporting data -============== - -Now that we have defined a :class:`~import_export.resources.ModelResource` class, -we can export books:: - - >>> from app.admin import BookResource - >>> dataset = BookResource().export() - >>> print(dataset.csv) - id,name,author,author_email,imported,published,price,categories - 2,Some book,1,,0,2012-12-05,8.85,1 - -Customize resource options -========================== - -By default :class:`~import_export.resources.ModelResource` introspects model -fields and creates :class:`~import_export.fields.Field`-attributes with an -appropriate :class:`~import_export.widgets.Widget` for each field. 
- -To affect which model fields will be included in an import-export -resource, use the ``fields`` option to whitelist fields:: - - class BookResource(resources.ModelResource): - - class Meta: - model = Book - fields = ('id', 'name', 'price',) - -Or the ``exclude`` option to blacklist fields:: - - class BookResource(resources.ModelResource): - - class Meta: - model = Book - exclude = ('imported', ) - -An explicit order for exporting fields can be set using the ``export_order`` -option:: - - class BookResource(resources.ModelResource): - - class Meta: - model = Book - fields = ('id', 'name', 'author', 'price',) - export_order = ('id', 'price', 'author', 'name') - -The default field for object identification is ``id``, you can optionally set -which fields are used as the ``id`` when importing:: - - class BookResource(resources.ModelResource): - - class Meta: - model = Book - import_id_fields = ('isbn',) - fields = ('isbn', 'name', 'author', 'price',) - -When defining :class:`~import_export.resources.ModelResource` fields it is -possible to follow model relationships:: - - class BookResource(resources.ModelResource): - - class Meta: - model = Book - fields = ('author__name',) - -.. note:: - - Following relationship fields sets ``field`` as readonly, meaning - this field will be skipped when importing data. - -By default all records will be imported, even if no changes are detected. This -can be changed setting the ``skip_unchanged`` option. Also, the -``report_skipped`` option controls whether skipped records appear in the import -``Result`` object, and if using the admin whether skipped records will show in -the import preview page:: - - class BookResource(resources.ModelResource): - - class Meta: - model = Book - skip_unchanged = True - report_skipped = False - fields = ('id', 'name', 'price',) - -.. 
seealso:: - - :doc:`/api_resources` - - -Declaring fields -================ - -It is possible to override a resource field to change some of its -options:: - - from import_export.fields import Field - - class BookResource(resources.ModelResource): - published = Field(attribute='published', column_name='published_date') - - class Meta: - model = Book - -Other fields that don't exist in the target model may be added:: - - from import_export.fields import Field - - class BookResource(resources.ModelResource): - myfield = Field(column_name='myfield') - - class Meta: - model = Book - -.. seealso:: - - :doc:`/api_fields` - Available field types and options. - - -Advanced data manipulation on export -==================================== - -Not all data can be easily extracted from an object/model attribute. -In order to turn complicated data model into a (generally simpler) processed -data structure on export, ``dehydrate_`` method should be defined:: - - from import_export.fields import Field - - class BookResource(resources.ModelResource): - full_title = Field() - - class Meta: - model = Book - - def dehydrate_full_title(self, book): - book_name = getattr(book, "name", "unknown") - author_name = getattr(book.author, "name", "unknown") - return '%s by %s' % (book_name, author_name) - -In this case, the export looks like this: - - >>> from app.admin import BookResource - >>> dataset = BookResource().export() - >>> print(dataset.csv) - full_title,id,name,author,author_email,imported,published,price,categories - Some book by 1,2,Some book,1,,0,2012-12-05,8.85,1 - - -Customize widgets -================= - -A :class:`~import_export.resources.ModelResource` creates a field with a -default widget for a given field type. If the widget should be initialized -with different arguments, set the ``widgets`` dict. - -In this example widget, the ``published`` field is overridden to use a -different date format. This format will be used both for importing -and exporting resource. 
- -:: - - class BookResource(resources.ModelResource): - - class Meta: - model = Book - widgets = { - 'published': {'format': '%d.%m.%Y'}, - } - -.. seealso:: - - :doc:`/api_widgets` - available widget types and options. + model = Book # or 'core.Book' Importing data ============== @@ -256,12 +91,13 @@ Let's import some data! In the fourth line we use :func:`~import_export.resources.modelresource_factory` to create a default :class:`~import_export.resources.ModelResource`. -The ModelResource class created this way is equal to the one shown in the +The ``ModelResource`` class created this way is equal to the one shown in the example in section :ref:`base-modelresource`. In fifth line a :class:`~tablib.Dataset` with columns ``id`` and ``name``, and -one book entry, are created. A field for a primary key field (in this case, -``id``) always needs to be present. +one book entry, are created. A field (or combination of fields) which uniquely identifies an instance always needs to +be present. This is so that the import process can manage creates / updates. In this case, we use ``id``. +For more information, see :ref:`advanced_usage:Create or update model instances`. In the rest of the code we first pretend to import data using :meth:`~import_export.resources.Resource.import_data` and ``dry_run`` set, @@ -272,195 +108,46 @@ then check for any errors and actually import data this time. :doc:`/import_workflow` for a detailed description of the import workflow and its customization options. - Deleting data ------------- To delete objects during import, implement the :meth:`~import_export.resources.Resource.for_delete` method on your :class:`~import_export.resources.Resource` class. +You should add custom logic which will signify which rows are to be deleted. -The following is an example resource which expects a ``delete`` field in the -dataset. 
An import using this resource will delete model instances for rows -that have their column ``delete`` set to ``1``:: +For example, suppose you would like to have a field in the import dataset to indicate which rows should be deleted. +You could include a field called *delete* which has either a 1 or 0 value. + +In this case, declare the resource as follows:: class BookResource(resources.ModelResource): - delete = fields.Field(widget=widgets.BooleanWidget()) def for_delete(self, row, instance): - return self.fields['delete'].clean(row) + return row["delete"] == "1" class Meta: model = Book +If the delete flag is set on a *'new'* instance (i.e. the row does not already exist in the db) then the row will be +skipped. -Signals -======= - -To hook in the import export workflow, you can connect to ``post_import``, -``post_export`` signals:: - - from django.dispatch import receiver - from import_export.signals import post_import, post_export - - @receiver(post_import, dispatch_uid='balabala...') - def _post_import(model, **kwargs): - # model is the actual model instance which after import - pass - - @receiver(post_export, dispatch_uid='balabala...') - def _post_export(model, **kwargs): - # model is the actual model instance which after export - pass - - -.. _admin-integration: - -Admin integration -================= - -Exporting ---------- - -Exporting via list filters -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Admin integration is achieved by subclassing -:class:`~import_export.admin.ImportExportModelAdmin` or one of the available -mixins (:class:`~import_export.admin.ImportMixin`, -:class:`~import_export.admin.ExportMixin`, -:class:`~import_export.admin.ImportExportMixin`):: - - # app/admin.py - from .models import Book - from import_export.admin import ImportExportModelAdmin - - class BookAdmin(ImportExportModelAdmin): - resource_class = BookResource - - admin.site.register(Book, BookAdmin) - -.. 
figure:: _static/images/django-import-export-change.png - - A screenshot of the change view with Import and Export buttons. - -.. figure:: _static/images/django-import-export-import.png - - A screenshot of the import view. - -.. figure:: _static/images/django-import-export-import-confirm.png - - A screenshot of the confirm import view. +.. _exporting_data: +Exporting data +============== -Exporting via admin action -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Another approach to exporting data is by subclassing -:class:`~import_export.admin.ImportExportActionModelAdmin` which implements -export as an admin action. As a result it's possible to export a list of -objects selected on the change list page:: - - # app/admin.py - from import_export.admin import ImportExportActionModelAdmin - - class BookAdmin(ImportExportActionModelAdmin): - pass - - -.. figure:: _static/images/django-import-export-action.png - - A screenshot of the change view with Import and Export as an admin action. - -Note that to use the :class:`~import_export.admin.ExportMixin` or -:class:`~import_export.admin.ExportActionMixin`, you must declare this mixin -**before** ``admin.ModelAdmin``:: - - # app/admin.py - from django.contrib import admin - from import_export.admin import ExportActionMixin - - class BookAdmin(ExportActionMixin, admin.ModelAdmin): - pass - -Note that :class:`~import_export.admin.ExportActionMixin` is declared first in -the example above! - - -Importing ---------- - -It is also possible to enable data import via standard Django admin interface. -To do this subclass :class:`~import_export.admin.ImportExportModelAdmin` or use -one of the available mixins, i.e. :class:`~import_export.admin.ImportMixin`, or -:class:`~import_export.admin.ImportExportMixin`. Customizations are, of course, -possible. - - -Customize admin import forms -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -It is possible to modify default import forms used in the model admin. 
For -example, to add an additional field in the import form, subclass and extend the -:class:`~import_export.forms.ImportForm` (note that you may want to also -consider :class:`~import_export.forms.ConfirmImportForm` as importing is a -two-step process). - -To use the customized form(s), overload -:class:`~import_export.admin.ImportMixin` respective methods, i.e. -:meth:`~import_export.admin.ImportMixin.get_import_form`, and also -:meth:`~import_export.admin.ImportMixin.get_confirm_import_form` if need be. - -For example, imagine you want to import books for a specific author. You can -extend the import forms to include ``author`` field to select the author from. - -Customize forms:: - - from django import forms - - class CustomImportForm(ImportForm): - author = forms.ModelChoiceField( - queryset=Author.objects.all(), - required=True) - - class CustomConfirmImportForm(ConfirmImportForm): - author = forms.ModelChoiceField( - queryset=Author.objects.all(), - required=True) - -Customize ``ModelAdmin``:: - - class CustomBookAdmin(ImportMixin, admin.ModelAdmin): - resource_class = BookResource - - def get_import_form(self): - return CustomImportForm - - def get_confirm_import_form(self): - return CustomConfirmImportForm - - def get_form_kwargs(self, form, *args, **kwargs): - # pass on `author` to the kwargs for the custom confirm form - if isinstance(form, CustomImportForm): - if form.is_valid(): - author = form.cleaned_data['author'] - kwargs.update({'author': author.id}) - return kwargs - - - admin.site.register(Book, CustomBookAdmin) - -To further customize admin imports, consider modifying the following -:class:`~import_export.admin.ImportMixin` methods: -:meth:`~import_export.admin.ImportMixin.get_form_kwargs`, -:meth:`~import_export.admin.ImportMixin.get_import_resource_kwargs`, -:meth:`~import_export.admin.ImportMixin.get_import_data_kwargs`. - -Using the above methods it is possible to customize import form initialization -as well as importing customizations. 
+Now that we have defined a :class:`~import_export.resources.ModelResource` class, +we can export books:: + >>> from core.admin import BookResource + >>> dataset = BookResource().export() + >>> print(dataset.csv) + id,name,author,author_email,imported,published,price,categories + 2,Some book,1,,0,2012-12-05,8.85,1 -.. seealso:: +.. warning:: - :doc:`/api_admin` - available mixins and options. + Data exported programmatically is not sanitized for malicious content. + You will need to understand the implications of this and handle accordingly. + See :ref:`admin_security`. diff --git a/docs/image_src/export_workflow.txt b/docs/image_src/export_workflow.txt new file mode 100644 index 000000000..221eee38e --- /dev/null +++ b/docs/image_src/export_workflow.txt @@ -0,0 +1,58 @@ +# see import_workflow.txt + +participant Resource +participant Field +participant Widget +participant tablib.Dataset + +Resource->Resource:""export(queryset=None, \*\*kwargs)"" +activate Resource + +Resource->Resource:""before_export(queryset=None, \*\*kwargs)"" + +note over Resource: A Queryset instance can be passed into export().\nIf no Queryset is passed, get_queryset() is called. + +Resource->Resource:""get_queryset()"" +activate Resource #lightblue +Resource<--Resource:""Queryset"" +deactivate Resource + +Resource->Resource:""filter_export(queryset, \*\*kwargs)"" +activate Resource #lightblue +Resource<--Resource:""Queryset"" +deactivate Resource + +loop #pink each row in Queryset + +Resource->Resource:""export_resource(instance)"" +activate Resource #lightblue + +loop #green each field in export field list +Resource->Resource:""export_field(field, instance)"" +activate Resource #lightpink +Resource->Field:""export(instance)"" +activate Field #lightblue +note over Field: An optional callable can be defined instead of export().\n See 'dehydrate' methods in docs. +Field->Field:""get_value(instance)"" +activate Field #lightpink +note over Field: Get the field's value from the instance. 
+Field<--Field:""<>"" +Field->Widget:""render(value, instance)"" +activate Widget +note over Widget: Format field value into a\nstring or value as required. +Field<--Widget:""<> +deactivate Widget +deactivate Field +Resource<--Field:""<>"" +deactivate Field +deactivate Resource +end +deactivate Resource + +Resource->tablib.Dataset:""append()"" +end + +Resource<--Resource:""<>"" +deactivate Resource + + diff --git a/docs/image_src/import_workflow.txt b/docs/image_src/import_workflow.txt new file mode 100644 index 000000000..f4e9b9cd4 --- /dev/null +++ b/docs/image_src/import_workflow.txt @@ -0,0 +1,93 @@ +# Source for import workflow sequence diagram. +# This source can be loaded into sequencediagram.org (web app). +# If this source is changed, then the svg image in 'images/' dir should be regenerated +# by exporting from the web app. + +participant Resource +participant Result +participant RowResult +participant InstanceLoader +participant Field +participant Widget + +Resource->Resource:""import_data(data, \*\*kwargs)"" +activate Resource +Resource->Result:""__init__()"" +activate Result +Resource<--Result:""Result"" +deactivate Result + +Resource->Resource:""before_import(dataset, \*\*kwargs)"" + +loop #pink each row in dataset +Resource->Resource:""import_row(row, instance_loader, \*\*kwargs)"" +activate Resource #lightblue +Resource->RowResult:""__init__()"" +activate RowResult +Resource<--RowResult:""RowResult"" +deactivate RowResult + +Resource->Resource:""before_import_row(row, \*\*kwargs)"" + + +Resource->Resource:""get_or_init_instance(instance_loader, \n row)"" +activate Resource #lightgrey + +Resource->Resource:""get_instance(instance_loader, row)"" +activate Resource #lightgreen +Resource->InstanceLoader:""get_instance(row)"" +activate InstanceLoader +note over InstanceLoader: Existing Instance is returned if exists,\n otherwise a new Instance is created. 
+ +Resource<--InstanceLoader:""Instance"" +deactivate InstanceLoader +Resource-->Resource:""Instance"" +deactivate Resource +Resource-->Resource:""Instance, bool"" +deactivate Resource + +Resource->Resource:""after_init_instance(instance, new, row \n \*\*kwargs)"" + +Resource->Resource:""for_delete(row, instance)"" +activate Resource #lightgrey +note over Resource: If True, row is deleted. +Resource-->Resource:""bool"" + +deactivate Resource + +Resource->Resource:""import_instance(instance, row, \*\*kwargs)"" +activate Resource #lightgrey + +loop #green each field in row +Resource->Field:""save(instance, row, is_m2m, \*\*kwargs)"" +note over Field: save logic determines the correct value\nand sets attribute on instance. +Field->Field:""clean(row, \*\*kwargs)"" +activate Field +Field->Widget:""clean(value, row, \*\*kwargs)"" +activate Widget +Field<--Widget:""value"" +deactivate Widget +deactivate Field +end + +deactivate Resource + +Resource->Resource:""skip_row(instance, original, row, \n import_validation_errors)"" +activate Resource #lightgrey +note over Resource: If True, row is skipped. +Resource-->Resource:""bool"" +deactivate Resource + +Resource->Resource:""validate_instance(instance, \n import_validation_errors)"" +Resource->Resource:""save_instance(instance, row, new, \n \*\*kwargs)"" +Resource->Resource:""save_m2m(instance, row, \*\*kwargs)"" + +Resource->Resource:""after_import_row(row, row_result, \n \*\*kwargs)"" + + +Resource-->Resource:""RowResult"" +deactivate Resource +end + +Resource<--Resource:""Result"" +deactivate Resource diff --git a/docs/import_workflow.rst b/docs/import_workflow.rst index e01909f30..37f9381b8 100644 --- a/docs/import_workflow.rst +++ b/docs/import_workflow.rst @@ -1,42 +1,33 @@ ==================== -Import data workflow +Import workflow ==================== -This document describes the import data workflow in detail, with hooks that -enable customization of the import process. 
The central aspect of the import -process is a resource's :meth:`~import_export.resources.Resource.import_data` -method which is explained below. +This document describes the import data workflow in detail, with hooks that enable +customization of the import process. -.. function:: import_data(dataset, dry_run=False, raise_errors=False) +Methods highlighted in yellow in the sequence diagram indicate public methods which can +be overridden. - The :meth:`~import_export.resources.Resource.import_data` method of - :class:`~import_export.resources.Resource` is responsible for importing data - from a given dataset. - - ``dataset`` is required and expected to be a :class:`tablib.Dataset` with - a header row. - - ``dry_run`` is a Boolean which determines if changes to the database are - made or if the import is only simulated. It defaults to ``False``. - - ``raise_errors`` is a Boolean. If ``True``, import should raise errors. - The default is ``False``, which means that eventual errors and traceback - will be saved in ``Result`` instance. +.. image:: _static/images/import_workflow.svg + :alt: Import workflow sequence diagram + :scale: 75% +The :meth:`~import_export.resources.Resource.import_data` method of +:class:`~import_export.resources.Resource` is responsible for importing data +from a given dataset. Refer to the method documentation for parameters to this method. This is what happens when the method is invoked: #. First, a new :class:`~import_export.results.Result` instance, which holds errors and other information gathered during the import, is initialized. - Then, an :class:`~import_export.instance_loaders.InstanceLoader` responsible + Then, an :class:`~import_export.instance_loaders.BaseInstanceLoader` responsible for loading existing instances is initialized. 
A different :class:`~import_export.instance_loaders.BaseInstanceLoader` can be specified - via :class:`~import_export.resources.ResourceOptions`'s + via :class:`~import_export.options.ResourceOptions`'s ``instance_loader_class`` attribute. A :class:`~import_export.instance_loaders.CachedInstanceLoader` can be used to - reduce number of database queries. See the `source - `_ + reduce number of database queries. See the :mod:`~import_export.instance_loaders` for available implementations. #. The :meth:`~import_export.resources.Resource.before_import` hook is called. @@ -45,81 +36,79 @@ This is what happens when the method is invoked: #. Each row of the to-be-imported dataset is processed according to the following steps: - #. The :meth:`~import_export.resources.Resource.before_import_row` hook is - called to allow for row data to be modified before it is imported - - #. :meth:`~import_export.resources.Resource.get_or_init_instance` is called - with current :class:`~import_export.instance_loaders.BaseInstanceLoader` - and current row of the dataset, returning an object and a Boolean - declaring if the object is newly created or not. + * The :meth:`~import_export.resources.Resource.before_import_row` hook is + called to allow for row data to be modified before it is imported. - If no object can be found for the current row, - :meth:`~import_export.resources.Resource.init_instance` is invoked to - initialize an object. + * :meth:`~import_export.resources.Resource.get_or_init_instance` is called + with current :class:`~import_export.instance_loaders.BaseInstanceLoader` + and current row of the dataset, returning an object and a Boolean + declaring if the object is newly created or not. - As always, you can override the implementation of - :meth:`~import_export.resources.Resource.init_instance` to customize - how the new object is created (i.e. set default values). 
+ If no object can be found for the current row, + :meth:`~import_export.resources.Resource.init_instance` is invoked to + initialize an object. - #. :meth:`~import_export.resources.Resource.for_delete` is called to - determine if the passed ``instance`` - should be deleted. In this case, the import process for the current row - is stopped at this point. + As always, you can override the implementation of + :meth:`~import_export.resources.Resource.init_instance` to customize + how the new object is created (i.e. set default values). - #. If the instance was not deleted in the previous step, - :meth:`~import_export.resources.Resource.import_obj` is called with the - ``instance`` as current object, ``row`` as current row and ``dry run``. + * :meth:`~import_export.resources.Resource.for_delete` is called to + determine if the passed ``instance`` + should be deleted. In this case, the import process for the current row + is stopped at this point. - :meth:`~import_export.resources.Resource.import_field` is called for - each field in :class:`~import_export.resources.Resource` skipping many- - to-many fields. Many-to-many fields are skipped because they require - instances to have a primary key and therefore assignment is postponed to - when the object has already been saved. + * If the instance was not deleted in the previous step, + :meth:`~import_export.resources.Resource.import_instance` is called with the + ``instance`` as current object instance, ``row`` as current row. - :meth:`~import_export.resources.Resource.import_field` in turn calls - :meth:`~import_export.fields.Field.save`, if ``Field.attribute`` is set - and ``Field.column_name`` exists in the given row. + :meth:`~import_export.resources.Resource.import_field` is called for + each field in :class:`~import_export.resources.Resource` skipping many-
Many-to-many fields are skipped because they require + instances to have a primary key and therefore assignment is postponed to + when the object has already been saved. - #. It then is determined whether the newly imported object is different - from the already present object and if therefore the given row should be - skipped or not. This is handled by calling - :meth:`~import_export.resources.Resource.skip_row` with ``original`` as - the original object and ``instance`` as the current object from the dataset. + :meth:`~import_export.resources.Resource.import_field` in turn calls + :meth:`~import_export.fields.Field.save`, if ``Field.attribute`` is set + and ``Field.column_name`` exists in the given row. - If the current row is to be skipped, ``row_result.import_type`` is set - to ``IMPORT_TYPE_SKIP``. + * It then is determined whether the newly imported object is different + from the already present object and if therefore the given row should be + skipped or not. This is handled by calling + :meth:`~import_export.resources.Resource.skip_row` with ``original`` as + the original object and ``instance`` as the current object from the dataset. - #. If the current row is not to be skipped, - :meth:`~import_export.resources.Resource.save_instance` is called and - actually saves the instance when ``dry_run`` is not set. + If the current row is to be skipped, ``row_result.import_type`` is set + to ``IMPORT_TYPE_SKIP``. - There are two hook methods (that by default do nothing) giving you the - option to customize the import process: + * If the current row is not to be skipped, + :meth:`~import_export.resources.Resource.save_instance` is called and + actually saves the instance when ``dry_run`` is not set. 
- * :meth:`~import_export.resources.Resource.before_save_instance` - * :meth:`~import_export.resources.Resource.after_save_instance` + There are two hook methods (that by default do nothing) giving you the + option to customize the import process: - Both methods receive ``instance`` and ``dry_run`` arguments. + * :meth:`~import_export.resources.Resource.before_save_instance` + * :meth:`~import_export.resources.Resource.after_save_instance` - #. :meth:`~import_export.resources.Resource.save_m2m` is called to save - many to many fields. + * :meth:`~import_export.resources.Resource.save_m2m` is called to save + many to many fields. - #. :class:`~import_export.results.RowResult` is assigned with a diff - between the original and the imported object fields, as well as and - ``import_type`` attribute which states whether the row is new, updated, - skipped or deleted. + * :class:`~import_export.results.RowResult` is assigned with a diff + between the original and the imported object fields, as well as an + ``import_type`` attribute which states whether the row is new, updated, + skipped or deleted. - If an exception is raised during row processing and - :meth:`~import_export.resources.Resource.import_data` was invoked with - ``raise_errors=False`` (which is the default) the particular traceback - is appended to :class:`~import_export.results.RowResult` as well. + If an exception is raised during row processing and + :meth:`~import_export.resources.Resource.import_row` was invoked with + ``raise_errors=False`` (which is the default) the particular traceback + is appended to :class:`~import_export.results.RowResult` as well.
- If either the row was not skipped or the - :class:`~import_export.resources.Resource` is configured to report - skipped rows, the :class:`~import_export.results.RowResult` is appended - to the :class:`~import_export.results.Result` + If either the row was not skipped or the + :class:`~import_export.resources.Resource` is configured to report + skipped rows, the :class:`~import_export.results.RowResult` is appended + to the :class:`~import_export.results.Result` - #. The :meth:`~import_export.resources.Resource.after_import_row` hook is called + * The :meth:`~import_export.resources.Resource.after_import_row` hook is called #. The :class:`~import_export.results.Result` is returned. @@ -127,8 +116,8 @@ Transaction support ------------------- If transaction support is enabled, whole import process is wrapped inside -transaction and rollbacked or committed respectively. -All methods called from inside of ``import_data`` (create / delete / update) -receive ``False`` for ``dry_run`` argument. +transaction and rolled back or committed respectively. +All methods called from inside of :meth:`~import_export.resources.Resource.import_data` +(create / delete / update) receive ``False`` for ``dry_run`` argument. .. _Dataset: https://tablib.readthedocs.io/en/stable/api/#dataset-object diff --git a/docs/index.rst b/docs/index.rst index daf152284..451304335 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,16 +7,24 @@ exporting data with included admin integration. **Features:** - * support multiple formats (Excel, CSV, JSON, ... + * Import from / Export to multiple file formats + + * Manage import / export of object relations, data types + + * Handle create / update / delete / skip during imports + + * Extensible API + + * Support multiple formats (Excel, CSV, JSON, ... 
and everything else that `tablib`_ supports) - * admin integration for importing + * Bulk import - * preview import changes + * Admin integration for importing / exporting - * admin integration for exporting + * Preview import changes - * export data respecting admin filters + * Export data respecting admin filters .. figure:: _static/images/django-import-export-change.png @@ -29,9 +37,17 @@ exporting data with included admin integration. installation getting_started + advanced_usage + admin_integration import_workflow + export_workflow bulk_import + management_commands celery + testing + faq + screenshots + release_notes changelog .. toctree:: @@ -43,9 +59,17 @@ exporting data with included admin integration. api_widgets api_fields api_instance_loaders + api_mixins api_tmp_storages api_results api_forms + api_exceptions + +.. toctree:: + :maxdepth: 2 + :caption: Developers + + contributing .. _`tablib`: https://github.com/jazzband/tablib diff --git a/docs/installation.rst b/docs/installation.rst index c1a7826c4..f1d4530c6 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -2,24 +2,26 @@ Installation and configuration ============================== -django-import-export is available on the Python Package Index (PyPI), so it +import-export is available on the Python Package Index (PyPI), so it can be installed with standard Python tools like ``pip`` or ``easy_install``:: - $ pip install django-import-export + pip install django-import-export -This will automatically install many formats supported by tablib. If you need -additional formats like ``cli`` or ``Pandas DataFrame``, you should install the -appropriate tablib dependencies (e.g. ``pip install tablib[pandas]``). Read -more on the `tablib format documentation page`_. +This will automatically install the default formats supported by tablib. +If you need additional formats you should install the extra dependencies as required +appropriate tablib dependencies (e.g. 
``pip install django-import-export[xlsx]``). -.. _tablib format documentation page: https://tablib.readthedocs.io/en/stable/formats/ +To install all available formats, use ``pip install django-import-export[all]``. + +For all formats, see the +`tablib documentation `_. Alternatively, you can install the git repository directly to obtain the development version:: - $ pip install -e git+https://github.com/django-import-export/django-import-export.git#egg=django-import-export + pip install -e git+https://github.com/django-import-export/django-import-export.git#egg=django-import-export -Now, you're good to go, unless you want to use django-import-export from the +Now, you're good to go, unless you want to use import-export from the admin as well. In this case, you need to add it to your ``INSTALLED_APPS`` and let Django collect its static files. @@ -36,9 +38,7 @@ let Django collect its static files. $ python manage.py collectstatic All prerequisites are set up! See :doc:`getting_started` to learn how to use -django-import-export in your project. - - +import-export in your project. Settings ======== @@ -49,7 +49,7 @@ You can configure the following in your settings file: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Controls if resource importing should use database transactions. Defaults to -``False``. Using transactions makes imports safer as a failure during import +``True``. Using transactions makes imports safer as a failure during import won’t import only part of the data set. Can be overridden on a ``Resource`` class by setting the @@ -58,41 +58,76 @@ Can be overridden on a ``Resource`` class by setting the ``IMPORT_EXPORT_SKIP_ADMIN_LOG`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -If set to ``True``, skips the creation of admin log entries when importing. +If set to ``True``, skips the creation of admin log entries when importing via the +:ref:`Admin UI`. Defaults to ``False``. This can speed up importing large data sets, at the cost of losing an audit trail. 
Can be overridden on a ``ModelAdmin`` class inheriting from ``ImportMixin`` by setting the ``skip_admin_log`` class attribute. +.. _import_export_tmp_storage_class: + ``IMPORT_EXPORT_TMP_STORAGE_CLASS`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +A string path to the preferred temporary storage module. + Controls which storage class to use for storing the temporary uploaded file during imports. Defaults to ``import_export.tmp_storages.TempFolderStorage``. Can be overridden on a ``ModelAdmin`` class inheriting from ``ImportMixin`` by setting the ``tmp_storage_class`` class attribute. +.. _import_export_default_file_storage: + +``IMPORT_EXPORT_DEFAULT_FILE_STORAGE`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A string path to a customized storage implementation. + +This setting is deprecated and only applies if using Django with a version less than 4.2, +and will be removed in a future release. + +.. _import_export_import_permission_code: + ``IMPORT_EXPORT_IMPORT_PERMISSION_CODE`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If set, lists the permission code that is required for users to perform the -“import” action. Defaults to ``None``, which means everybody can perform +'import' action. Defaults to ``None``, which means all users can perform imports. Django’s built-in permissions have the codes ``add``, ``change``, ``delete``, -and ``view``. You can also add your own permissions. +and ``view``. You can also add your own permissions. For example, if you set this +value to 'import', then you can define an explicit permission for import in the example +app with: + +.. 
code-block:: python + + from core.models import Book + from django.contrib.auth.models import Permission + from django.contrib.contenttypes.models import ContentType + + content_type = ContentType.objects.get_for_model(Book) + permission = Permission.objects.create( + codename="import_book", + name="Can import book", + content_type=content_type, + ) + +Now only users who are assigned 'import_book' permission will be able to perform +imports. For more information refer to the +`Django auth `_ +documentation. + +.. _import_export_export_permission_code: ``IMPORT_EXPORT_EXPORT_PERMISSION_CODE`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -If set, lists the permission code that is required for users to perform the -“export” action. Defaults to ``None``, which means everybody can perform -exports. - -Django’s built-in permissions have the codes ``add``, ``change``, ``delete``, -and ``view``. You can also add your own permissions. +Defines the same behaviour as :ref:`IMPORT_EXPORT_IMPORT_PERMISSION_CODE`, but for +export. ``IMPORT_EXPORT_CHUNK_SIZE`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -104,23 +139,172 @@ decreasing it, or speed up exports by increasing it. Can be overridden on a ``Resource`` class by setting the ``chunk_size`` class attribute. +.. _import_export_skip_admin_confirm: + +``IMPORT_EXPORT_SKIP_ADMIN_CONFIRM`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If ``True``, no import confirmation page will be presented to the user in the Admin UI. +The file will be imported in a single step. + +By default, the import will occur in a transaction. +If the import causes any runtime errors (including validation errors), +then the errors are presented to the user and the entire transaction is rolled back. + +Note that if you disable transaction support via configuration (or if your database +does not support transactions), then validation errors will still be presented to the user +but valid rows will have imported. 
+ +This flag can be enabled for the model admin using the :attr:`~import_export.mixins.BaseImportMixin.skip_import_confirm` +flag. + +.. _import_export_skip_admin_export_ui: + +``IMPORT_EXPORT_SKIP_ADMIN_EXPORT_UI`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A boolean value which will skip the :ref:`export form` in the Admin UI, when the export is +initiated from the :ref:`change list page`. +The file will be exported in a single step. + +If enabled: + +* the first element in the :attr:`~import_export.mixins.BaseImportExportMixin.resource_classes` list will be used. +* the first element in the :ref:`export_formats` list will be used. + +This flag can be enabled for the model admin using the :attr:`~import_export.mixins.BaseExportMixin.skip_export_form` +flag. + +.. _import_export_skip_admin_action_export_ui: + +``IMPORT_EXPORT_SKIP_ADMIN_ACTION_EXPORT_UI`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A boolean value which will skip the :ref:`export form` in the Admin UI, but only when the export is +requested from an :ref:`Admin UI action`, or from the 'Export' button on the +:ref:`change form `. + +.. _import_export_escape_formulae_on_export: + +``IMPORT_EXPORT_ESCAPE_FORMULAE_ON_EXPORT`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If set to ``True``, strings will be sanitized by removing any leading '=' character. This is to prevent execution of +Excel formulae. By default this is ``False``. + +.. _import_export_escape_illegal_chars_on_export: + +``IMPORT_EXPORT_ESCAPE_ILLEGAL_CHARS_ON_EXPORT`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If an export to XLSX format generates +`IllegalCharacterError `_, then +if this flag is ``True`` strings will be sanitized by removing any invalid Excel characters, +replacing them with the unicode replacement character. +By default this is ``False``, meaning that ``IllegalCharacterError`` is caught and re-raised as ``ValueError``. + +.. 
_import_export_formats: + +``IMPORT_EXPORT_FORMATS`` +~~~~~~~~~~~~~~~~~~~~~~~~~ + +A list that defines which file formats will be allowed during imports and exports. Defaults +to ``import_export.formats.base_formats.DEFAULT_FORMATS``. +The values must be those provided in ``import_export.formats.base_formats`` e.g + +.. code-block:: python + + # settings.py + from import_export.formats.base_formats import XLSX + IMPORT_EXPORT_FORMATS = [XLSX] + +This can be set for a specific model admin by declaring the ``formats`` attribute. + +.. _import_formats: + +``IMPORT_FORMATS`` +~~~~~~~~~~~~~~~~~~ + +A list that defines which file formats will be allowed during imports. Defaults +to ``IMPORT_EXPORT_FORMATS``. +The values must be those provided in ``import_export.formats.base_formats`` e.g + +.. code-block:: python + + # settings.py + from import_export.formats.base_formats import CSV, XLSX + IMPORT_FORMATS = [CSV, XLSX] + +This can be set for a specific model admin by declaring the ``import_formats`` attribute. + +.. _export_formats: + +``EXPORT_FORMATS`` +~~~~~~~~~~~~~~~~~~ + +A list that defines which file formats will be allowed during exports. Defaults +to ``IMPORT_EXPORT_FORMATS``. +The values must be those provided in ``import_export.formats.base_formats`` e.g + +.. code-block:: python + + # settings.py + from import_export.formats.base_formats import XLSX + EXPORT_FORMATS = [XLSX] + +This can be set for a specific model admin by declaring the ``export_formats`` attribute. + +.. _import_export_import_ignore_blank_lines: + +``IMPORT_EXPORT_IMPORT_IGNORE_BLANK_LINES`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If set to ``True``, rows without content will be ignored in XLSX imports. +This prevents an old Excel 1.0 bug which causes openpyxl ``max_rows`` to be counting all +logical empty rows. Some editors (like LibreOffice) might add :math:`2^{20}` empty rows to the +file, which causes a significant slowdown. By default this is ``False``. + +.. 
_exampleapp: Example app =========== -There's an example application that showcases what django-import-export can do. -It's assumed that you have set up a Python ``venv`` with all required dependencies -(from ``test.txt`` requirements file) and are able to run Django locally. +There's an example application that showcases what import_export can do. -You can run the example application as follows:: +Before starting, set up a virtual environment ("venv") using :ref:`these instructions`. + +You can initialize and run the example application as follows:: cd tests ./manage.py makemigrations ./manage.py migrate ./manage.py createsuperuser - ./manage.py loaddata category.json book.json + ./manage.py loaddata author.json category.json book.json ./manage.py runserver Go to http://127.0.0.1:8000 -``books-sample.csv`` contains sample book data which can be imported. +For example import files, see :ref:`getting_started:Test data`. + +.. _logging: + +Configure logging +================= + +You can adjust the log level to see output as required. +This is an example configuration to be placed in your application settings:: + + LOGGING = { + "version": 1, + "handlers": { + "console": {"level": "DEBUG", "class": "logging.StreamHandler"}, + }, + "loggers": { + "django.db.backends": {"level": "INFO", "handlers": ["console"]}, + "import_export": { + "handlers": ["console"], + "level": "INFO", + }, + }, + } + diff --git a/docs/management_commands.rst b/docs/management_commands.rst new file mode 100644 index 000000000..8c3589f50 --- /dev/null +++ b/docs/management_commands.rst @@ -0,0 +1,73 @@ +Django Management Commands +========================== + +Export Command +-------------- + +The ``export`` command allows you to export data from a specified Django model +or a resource class. The exported data can be saved in different formats, such +as CSV or XLSX. + +Usage +----- + +.. 
code-block:: bash + + python manage.py export [--encoding ENCODING] + +- **format**: Specify the format in which the data should be exported. - +- **resource**: Specify the resource or model to export. Accepts a resource class or a model class in dotted path format. - **--encoding** (optional): Specify the encoding (e.g., 'utf-8') to be used for the exported data. + +Example +------- + +.. code-block:: bash + + python manage.py export CSV auth.User + +This command will export the User model data in CSV format using utf-8 +encoding. + +Another example: + +.. code-block:: bash + + python manage.py export XLSX mymodule.resources.MyResource + +This command will export the data from ``MyResource`` resource in XLSX format. + +Import Command +-------------- + +The ``import`` command allows you to import data from a file using a specified +Django model or a custom resource class. + +Usage +----- + +.. code-block:: bash + + python manage.py import [--format FORMAT] [--encoding ENCODING] [--dry-run] [--raise-errors] + +- **resource**: The resource class or model class in dotted path format. +- **import_file_name**: The file from which data is imported (``-`` can be used to indicate stdin). +- **--format** (optional): Specify the format of the data to import. If not provided, it will be guessed from the mimetype. +- **--encoding** (optional): Specify the character encoding of the data. +- **--dry-run**: Perform a trial run without making changes. +- **--raise-errors**: Raise any encountered errors during execution. + +Example +------- + +Import data from file into auth.User model using default model resource: + +.. code-block:: bash + + python manage.py import auth.User users.csv + +Import data from file using custom model resource, raising errors: + +.. 
code-block:: bash + + python manage.py import --raise-errors helper.MyUserResource users.csv + diff --git a/docs/release_notes.rst b/docs/release_notes.rst new file mode 100644 index 000000000..962ec7e79 --- /dev/null +++ b/docs/release_notes.rst @@ -0,0 +1,521 @@ +============= +Release Notes +============= + +v4.2 +---- + +* When exporting via :ref:`admin action`, the queryset is now filtered on + :meth:`~import_export.admin.ExportMixin.get_queryset` instead of the Model's default queryset. + This should have no impact on existing implementations. + + This change also made :meth:`~import_export.admin.ExportMixin.get_valid_export_item_pks` obsolete, as the + ModelAdmin's :meth:`~import_export.admin.ExportMixin.get_export_queryset`, or + ModelAdmin's get_queryset can be used instead. + The :meth:`~import_export.admin.ExportMixin.get_valid_export_item_pks` method is now deprecated. + + See `PR 1890 `_. + +* Removed internal method ``_get_enabled_export_fields()`` in favour of passing the selected fields list as a + new parameter to :meth:`~import_export.resources.Resource.export_resource` and + :meth:`~import_export.resources.Resource.get_export_headers`. + +* Hide the "Resource" form when it only has one option, to avoid potentially confusing text in the interface like + "Resource: BookResource". To undo this change, use a form subclass that changes the field’s widget to a + ``django.forms.Select``. See `1908 `_ + +* `tablib `_ has been upgraded from v3.5.0 to 3.6.1. + This upgrade removes tablib's dependency on `MarkupPy `_ in favour + of ``ElementTree``. If you export to HTML, then this change may affect your output format, particularly if you have + already escaped HTML characters in the text. + + See `issue 1627 `_. + +Breaking changes +^^^^^^^^^^^^^^^^ + +* This release fixes a regression introduced in v4. From v4.2, numeric, boolean and date/time widgets are written as + native values to spreadsheet formats (ODS, XLS, XLSX). 
This was the default behavior in v3. + See :ref:`documentation`. + + This means that the ``coerce_to_string`` value which is passed to :class:`~import_export.widgets.Widget` is now + ignored if you are exporting to a spreadsheet format from the Admin interface. + + If you have subclassed ``Widget``, ``Field`` or ``Resource``, then you may need to adjust your code to include + the ``**kwargs`` param as follows: + +.. list-table:: + :header-rows: 1 + + * - Previous + - New + + * - ``Widget.render(self, value, obj=None)`` + - ``Widget.render(self, value, obj=None, **kwargs)`` + + * - ``Field.export(self, instance)`` + - ``Field.export(self, instance, **kwargs)`` + + * - ``Resource.export_field(self, field, instance)`` + - ``Resource.export_field(self, field, instance, **kwargs)`` + + * - ``Resource.export_resource(self, instance, selected_fields=None)`` + - ``Resource.export_resource(self, instance, selected_fields=None, **kwargs)`` + +v4.1 +---- + +The ``Resource.get_fields()`` method is no longer called within the package and has been deprecated. +If you have overridden this method then it should be removed. + +v4.0 +---- + +v4 introduces significant updates to import-export. We have taken the opportunity to introduce +breaking changes in order to fix some long-standing issues. + +Refer to the :doc:`changelog` for more information. Please ensure you test +thoroughly before deploying v4 to production. + +This guide describes the major changes and how to upgrade. + +Installation +^^^^^^^^^^^^ + +We have modified installation methods to allow for optional dependencies. +This means that you have to explicitly declare dependencies when installing import-export. 
+ +If you are not sure, or want to preserve the pre-v4 behaviour, then ensure that +import-export is installed as follows (either in your requirements file or during +installation):: + + django-import-export[all] + +Functional changes +^^^^^^^^^^^^^^^^^^ + +CharWidget +"""""""""" + +Constructor arguments are dynamically set during instantiation based on the properties of the underlying Django +db CharField. If the db field has `blank `_ +set to True, then incoming values of empty strings or null are stored as empty strings. +See :class:`~import_export.widgets.CharWidget`. + +:meth:`~import_export.widgets.CharWidget.clean` will now return a string type as the default. +The ``coerce_to_string`` option introduced in v3 is no longer used in this method. + +Validation error messages +""""""""""""""""""""""""" + +The following widgets have had validation error messages updated: + +* :class:`~import_export.widgets.DateWidget` +* :class:`~import_export.widgets.TimeWidget` +* :class:`~import_export.widgets.DateTimeWidget` +* :class:`~import_export.widgets.DurationWidget` + +Export format +""""""""""""" + +We have standardized the export output which is returned from +:meth:`~import_export.widgets.Widget.render`. + +Prior to v4, the export format returned from ``render()`` varied between Widget implementations. +In v4, return values are rendered as strings by default (where applicable), with +``None`` values returned as empty strings. Widget params can modify this behavior. + +This causes a change when exporting to Excel. In v3, certain fields, such as numeric values, were rendered as their +native type. In v4, all fields are now rendered as strings. To preserve the v3 behavior when exporting to Excel, +set the ``coerce_to_string`` param to ``False``. See :ref:`documentation`. + +:doc:`Widget API documentation`. + +Export field order +"""""""""""""""""" + +The ordering rules for exported fields has been standardized. See :ref:`documentation`. 
+ +Error output +"""""""""""" + +If the ``raise_errors`` parameter of :meth:`~import_export.resources.Resource.import_data` is ``True``, then an instance +of :class:`~import_export.exceptions.ImportError` is raised. This exception wraps the underlying exception. + +See `this PR `_. + +Check ``import_id_fields`` +"""""""""""""""""""""""""" + +Prior to v4 we had numerous issues where users were confused when imports failed due to declared ``import_id_fields`` +not being present in the dataset. We added functionality in v4 to check for this and to raise a clearer error message. + +In some use-cases, it is a requirement that ``import_id_fields`` are not in the dataset, and are generated dynamically. +If this affects your implementation, start with the documentation :ref:`here`. + +Deprecations +^^^^^^^^^^^^ + +* The ``obj`` param passed to :meth:`~import_export.widgets.Widget.render` is deprecated. + The :meth:`~import_export.widgets.Widget.render` method should not need to have a reference to + model instance. + The call to :meth:`~import_export.widgets.Widget.render` from :meth:`~import_export.fields.Field.export` has been removed. + +* Use of ``ExportViewFormMixin`` is deprecated. See `this issue `_. + +* See :ref:`renamed_methods`. + +* In the Admin UI, the declaration of ``resource_class`` is replaced by ``resource_classes``:: + + class BookAdmin(ImportExportModelAdmin): + # remove this line + # resource_class = BookResource + # replace with this + resource_classes = [BookResource] + +Admin UI +^^^^^^^^ + +LogEntry +"""""""" + +``LogEntry`` instances are created during import for creates, updates and deletes. +The functionality to store ``LogEntry`` has changed in v4 in order to address a deprecation in Django 5. +For this to work correctly, deleted instances are now always copied and retained in each +:class:`~import_export.results.RowResult` so that they can be recorded in each ``LogEntry``. + +This only occurs for delete operations initiated from the Admin UI. 
+ +Export action +""""""""""""" + +The export action has been updated to include the export workflow. Prior to v4, it was possible to select export +selected items using an export admin action. However this meant that the export workflow was skipped and it was not +possible to select the export resource. This has been fixed in v4 so that export workflow is now present when +exporting via the Admin UI action. For more information see :ref:`export documentation`. + +Export selected fields +"""""""""""""""""""""" + +The :ref:`export 'confirm' page` now includes selectable fields for export. +If you wish to revert to the previous (v3) version of the export confirm screen, add a +:attr:`~import_export.admin.ExportMixin.export_form_class` declaration to your Admin class subclass, for example:: + + class BookAdmin(ImportExportModelAdmin): + export_form_class = ExportForm + +Success message +""""""""""""""" + +The success message shown on successful import has been updated to include the number of 'deleted' and 'skipped' rows. +See `this PR `_. + +Import error messages +""""""""""""""""""""" + +The default error message for import errors has been modified to simplify the format. +Error messages now contain the error message only by default. The row and traceback are not presented. + +The original format can be restored by setting :attr:`~import_export.admin.ImportMixin.import_error_display` on the +Admin class definition. For example:: + + class BookAdmin(ImportExportModelAdmin): + import_error_display = ("message", "row", "traceback") + + +See `this issue `_. + +API changes +^^^^^^^^^^^ + +v4 of import-export contains a number of changes to the API. These changes are summarized in the table below. +Refer to +`this PR `_ for detailed information. + +If you have customized import-export by overriding methods, then you may have to modify your installation before +working with v4. 
+ +If you have not overridden any methods then you should not be affected by these changes and no changes to your code +should be necessary. + +The API changes include changes to method arguments, although some method names have changed. + +Methods which process row data have been updated so that method args are standardized. +This has been done to resolve inconsistency issues where the parameters differed between method calls, and to allow +easier extensibility. + +:class:`import_export.resources.Resource` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. _renamed_methods: + +Renamed methods +""""""""""""""" + +.. list-table:: + :header-rows: 1 + + * - Previous + - New + - Summary + + * - ``import_obj(self, obj, data, dry_run, **kwargs)`` + - ``import_instance(self, instance, row, **kwargs)`` + - * ``obj`` param renamed to ``instance`` + * ``data`` param renamed to ``row`` + * ``dry_run`` param now in ``kwargs`` + + * - ``after_import_instance(self, instance, new, row_number=None, **kwargs)`` + - ``after_init_instance(self, instance, new, row, **kwargs)`` + - * ``row`` added as mandatory arg + * ``row_number`` now in ``kwargs`` + +Parameter changes +""""""""""""""""" + +This section describes methods in which the parameters have changed. + +.. 
list-table:: + :header-rows: 1 + + * - Previous + - New + - Summary + + * - ``before_import(self, dataset, using_transactions, dry_run, **kwargs)`` + - ``before_import(self, dataset, **kwargs)`` + - * ``using_transactions`` param now in ``kwargs`` + * ``dry_run`` param now in ``kwargs`` + + * - ``after_import(self, dataset, result, using_transactions, dry_run, **kwargs)`` + - ``after_import(self, dataset, result, **kwargs)`` + - * ``using_transactions`` param now in ``kwargs`` + * ``dry_run`` param now in ``kwargs`` + + * - ``before_import_row(self, row, row_number=None, **kwargs)`` + - ``before_import_row(self, row, **kwargs)`` + - * ``row_number`` now in ``kwargs`` + + * - ``after_import_row(self, row, row_result, row_number=None, **kwargs)`` + - ``after_import_row(self, row, row_result, **kwargs)`` + - * ``row_number`` now in ``kwargs`` + + * - ``import_row(self, row, instance_loader, using_transactions=True, dry_run=False, **kwargs)`` + - ``import_row(self, row, instance_loader, **kwargs)`` + - * ``dry_run`` param now in ``kwargs`` + * ``using_transactions`` param now in ``kwargs`` + + * - ``save_instance(self, instance, is_create, using_transactions=True, dry_run=False)`` + - ``save_instance(self, instance, is_create, row, ***kwargs)`` + - * ``dry_run`` param now in ``kwargs`` + * ``using_transactions`` param now in ``kwargs`` + * ``row`` added as mandatory arg + + * - ``save_m2m(self, obj, data, using_transactions, dry_run)`` + - ``save_m2m(self, instance, row, **kwargs)`` + - * ``row`` added as mandatory arg + * ``obj`` renamed to ``instance`` + * ``data`` renamed to ``row`` + * ``dry_run`` param now in ``kwargs`` + * ``using_transactions`` param now in ``kwargs`` + + * - ``before_save_instance(self, instance, using_transactions, dry_run)`` + - ``before_save_instance(self, instance, row, **kwargs)`` + - * ``row`` added as mandatory arg + * ``dry_run`` param now in ``kwargs`` + * ``using_transactions`` param now in ``kwargs`` + + * - 
``after_save_instance(self, instance, using_transactions, dry_run)`` + - ``after_save_instance(self, instance, row, **kwargs)`` + - * ``row`` added as mandatory arg + * ``dry_run`` param now in ``kwargs`` + * ``using_transactions`` param now in ``kwargs`` + + * - ``delete_instance(self, instance, using_transactions=True, dry_run=False)`` + - ``delete_instance(self, instance, row, **kwargs)`` + - * ``row`` added as mandatory arg + * ``dry_run`` param now in ``kwargs`` + * ``using_transactions`` param now in ``kwargs`` + + * - ``before_delete_instance(self, instance, dry_run)`` + - ``before_delete_instance(self, instance, row, **kwargs)`` + - * ``row`` added as mandatory arg + * ``dry_run`` param now in ``kwargs`` + * ``using_transactions`` param now in ``kwargs`` + + * - ``after_delete_instance(self, instance, dry_run)`` + - ``after_delete_instance(self, instance, row, **kwargs)`` + - * ``row`` added as mandatory arg + * ``dry_run`` param now in ``kwargs`` + * ``using_transactions`` param now in ``kwargs`` + + * - ``import_field(self, field, obj, data, is_m2m=False, **kwargs)`` + - ``import_field(self, field, instance, row, is_m2m=False, **kwargs):`` + - * ``obj`` renamed to ``instance`` + * ``data`` renamed to ``row`` + + * - ``before_export(self, queryset, *args, **kwargs)`` + - ``before_export(self, queryset, **kwargs)`` + - * unused ``*args`` list removed + + * - ``after_export(self, queryset, data, *args, **kwargs)`` + - ``after_export(self, queryset, dataset, **kwargs)`` + - * unused ``*args`` list removed + * ``data`` renamed to ``dataset`` + + * - ``filter_export(self, queryset, *args, **kwargs)`` + - ``filter_export(self, queryset, **kwargs)`` + - * unused ``*args`` list removed + + * - ``export_field(self, field, obj)`` + - ``export_field(self, field, instance)`` + - * ``obj`` renamed to ``instance`` + + * - ``export_resource(self, obj)`` + - ``export_resource(self, instance, fields=None)`` + - * ``obj`` renamed to ``instance`` + * ``fields`` passed as 
kwarg + + * - ``export(self, *args, queryset=None, **kwargs)`` + - ``export(self, queryset=None, **kwargs)`` + - * unused ``*args`` list removed + + * - ``get_export_headers(self)`` + - ``get_export_headers(self, fields=None)`` + - * ``fields`` passed as kwarg + + +:class:`import_export.mixins.BaseImportExportMixin` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Parameter changes +""""""""""""""""" + +.. list-table:: + :header-rows: 1 + + * - Previous + - New + - Summary + + * - ``get_resource_classes(self)`` + - ``get_resource_classes(self, request)`` + - * Added ``request`` param + + * - ``get_resource_kwargs(self, request, *args, **kwargs)`` + - ``get_resource_kwargs(self, request, **kwargs)`` + - * unused ``*args`` list removed + +:class:`import_export.mixins.BaseImportMixin` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Parameter changes +""""""""""""""""" + +.. list-table:: + :header-rows: 1 + + * - Previous + - New + - Summary + + * - ``get_import_resource_kwargs(self, request, *args, **kwargs)`` + - ``get_import_resource_kwargs(self, request, **kwargs)`` + - * unused ``*args`` list removed + + * - ``get_import_resource_classes(self)`` + - ``get_import_resource_classes(self, request)`` + - * Added ``request`` param + + * - ``choose_import_resource_class(self, form)`` + - ``choose_import_resource_class(self, form, request)`` + - * Added ``request`` param + +:class:`import_export.mixins.BaseExportMixin` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Parameter changes +""""""""""""""""" + +.. 
list-table:: + :header-rows: 1 + + * - Previous + - New + - Summary + + * - ``get_export_resource_classes(self)`` + - ``get_export_resource_classes(self, request)`` + - * Added ``request`` param + + * - ``get_export_resource_kwargs(self, request, *args, **kwargs)`` + - ``get_export_resource_kwargs(self, request, **kwargs)`` + - * unused ``*args`` list removed + + * - ``get_data_for_export(self, request, queryset, *args, **kwargs)`` + - ``get_data_for_export(self, request, queryset, **kwargs)`` + - * unused ``*args`` list removed + + * - ``choose_export_resource_class(self, form)`` + - ``choose_export_resource_class(self, form, request)`` + - * Added ``request`` param + + +:class:`import_export.fields.Field` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Parameter changes +""""""""""""""""" + +.. list-table:: + :header-rows: 1 + + * - Previous + - New + - Summary + + * - ``clean(self, data, **kwargs)`` + - ``clean(self, row, **kwargs)`` + - * ``data`` renamed to ``row`` + + * - ``get_value(self, instance)`` + - ``get_value(self, obj)`` + - * ``obj`` renamed to ``instance`` + + * - ``save(self, obj, data, is_m2m=False, **kwargs)`` + - ``save(self, instance, row, is_m2m=False, **kwargs)`` + - * ``obj`` renamed to ``instance`` + * ``data`` renamed to ``row`` + + * - ``export(self, obj)`` + - ``export(self, instance)`` + - * ``obj`` renamed to ``instance`` + + +:class:`import_export.forms.ImportExportFormBase` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you have subclassed one of the :mod:`~import_export.forms` then you may need to +modify the parameters passed to constructors. + +The ``input_format`` field of :class:`~import_export.forms.ImportForm` has been moved to the parent class +(:class:`~import_export.forms.ImportExportFormBase`) and renamed to ``format``. + +The ``file_format`` field of :class:`~import_export.forms.ExportForm` has been removed and is now replaced by +:attr:`~import_export.forms.ImportExportFormBase.format`. 
+ +Parameter changes +""""""""""""""""" + +.. list-table:: + :header-rows: 1 + + * - Previous + - New + - Summary + + * - ``__init__(self, *args, resources=None, **kwargs)`` + - ``__init__(self, formats, resources, **kwargs)`` + - * ``formats`` added as a mandatory arg + * ``resources`` added as a mandatory arg + * unused ``*args`` list removed diff --git a/docs/screenshots.rst b/docs/screenshots.rst new file mode 100644 index 000000000..ec336ef5d --- /dev/null +++ b/docs/screenshots.rst @@ -0,0 +1,53 @@ +=========== +Screenshots +=========== + +.. |import-form| image:: _static/images/screenshots/import-form.png + :width: 600 + :alt: screenshot of the import form in django-import-export + +.. |confirm-import| image:: _static/images/screenshots/confirm-import.png + :width: 600 + :alt: screenshot of the import confirm page in django-import-export + +.. |import-complete| image:: _static/images/screenshots/import-complete.png + :width: 600 + :alt: screenshot of the import completed page in django-import-export + +.. |import-update-with-authors| image:: _static/images/screenshots/import-update-with-authors.png + :width: 600 + :alt: screenshot of import update with author information in django-import-export + +.. |export-selected-action| image:: _static/images/screenshots/export-selected-action.png + :width: 600 + :alt: screenshot of selecting existing records for export in django-import-export + +.. |export-form| image:: _static/images/screenshots/export-form.png + :width: 600 + :alt: screenshot of selecting existing records for export in django-import-export + +These are some screenshots for the Admin UI of the :ref:`example application`. + +|import-form| + +Shows the initial import form with fields for selecting the resource, file and format. + +|confirm-import| + +Shows the confirmation page which appears prior to committing the import to the database. + +|import-complete| + +Shows the confirmation page on successful import. 
+ +|import-update-with-authors| + +Shows the preview page for updating existing records with author details. + +|export-selected-action| + +Shows selecting records for export. + +|export-form| + +Shows the export form with fields for selecting the resource, fields and format. diff --git a/docs/testing.rst b/docs/testing.rst new file mode 100644 index 000000000..f8ac394bd --- /dev/null +++ b/docs/testing.rst @@ -0,0 +1,83 @@ +Testing +======= + +All tests can be run using `tox `_ simply by running the `tox` command. By default, tests +are run against a local sqlite2 instance. `pyenv `_ can be used to manage multiple +python installations. + +MySql / Postgres tests +###################### + +By using Docker, you can also run tests against either a MySQL db and/or Postgres. + +The ``IMPORT_EXPORT_TEST_TYPE`` must be set according to the type of tests you wish to run. Set to 'postgres' for +postgres tests, and 'mysql-innodb' for mysql tests. If this environment variable is blank (or is any other value) then +the default sqlite2 db will be used. + +This process is scripted in ``runtests.sh``. Assuming that you have docker installed on your system, running +``runtests.sh`` will run tox against sqlite2, mysql and postgres. You can edit this script to customise testing as you +wish. + +Note that this is the process which is undertaken by CI builds. + +Coverage +######## + +Coverage data is written in parallel mode by default (defined in ``pyproject.toml``). + +A simple coverage report can be obtained with + +.. code-block:: bash + + make coverage + +However this may omit lines which are db specific. A full coverage report can be obtained by running tox. + +After a tox run, you can view coverage data as follows: + +.. code-block:: bash + + # combine all coverage data generated by tox into one file + coverage combine + + # produce an HTML coverage report + coverage html + +Check the output of the above commands to locate the coverage HTML file. 
+ +Bulk testing +############ + +There is a helper script available to generate and profile bulk loads. See ``scripts/bulk_import.py``. + +You can use this script by configuring environment variables as defined above, and then installing and running the test +application. In order to run the helper script, you will need to run ``make install-test-requirements``, and then add +`django-extensions` to `settings.py` (`INSTALLED_APPS`). + +You can then run the script as follows: + +.. code-block:: bash + + # run creates, updates, and deletes + ./manage.py runscript bulk_import + + # pass 'create', 'update' or 'delete' to run the single test + ./manage.py runscript bulk_import --script-args create + +Enable logging +^^^^^^^^^^^^^^ + +You can see console SQL debug logging by updating the ``LOGGING`` block in `settings.py`:: + + LOGGING = { + "version": 1, + "handlers": {"console": {"class": "logging.StreamHandler"}}, + "root": { + "handlers": ["console"], + }, + "loggers": { + "django.db.backends": {"level": "DEBUG", "handlers": ["console"]}, + } + } + + diff --git a/import_export/__init__.py b/import_export/__init__.py index e90ba7fa0..a909dcde1 100644 --- a/import_export/__init__.py +++ b/import_export/__init__.py @@ -1 +1,11 @@ -__version__ = '2.7.1' +try: + # import from _version.py generated by setuptools_scm during release + from ._version import version as __version__ +except ImportError: + # return a valid version if running in a context where no + # version available (e.g.
local build) + from os import path as _path + + from setuptools_scm import get_version as _gv + + __version__ = _gv(_path.join(_path.dirname(__file__), _path.pardir)) diff --git a/import_export/admin.py b/import_export/admin.py index 1c552b277..8dcf8958f 100644 --- a/import_export/admin.py +++ b/import_export/admin.py @@ -1,32 +1,70 @@ +import logging +import warnings + import django -from django import forms from django.conf import settings from django.contrib import admin, messages from django.contrib.admin.models import ADDITION, CHANGE, DELETION, LogEntry from django.contrib.auth import get_permission_codename from django.contrib.contenttypes.models import ContentType -from django.core.exceptions import PermissionDenied +from django.core.exceptions import FieldError, PermissionDenied +from django.forms import MultipleChoiceField, MultipleHiddenInput from django.http import HttpResponse, HttpResponseRedirect +from django.shortcuts import render from django.template.response import TemplateResponse from django.urls import path, reverse from django.utils.decorators import method_decorator -from django.utils.encoding import force_str from django.utils.module_loading import import_string from django.utils.translation import gettext_lazy as _ from django.views.decorators.http import require_POST -from .forms import ConfirmImportForm, ExportForm, ImportForm, export_action_form_factory +from .formats.base_formats import BINARY_FORMATS +from .forms import ConfirmImportForm, ImportForm, SelectableFieldsExportForm from .mixins import BaseExportMixin, BaseImportMixin from .results import RowResult from .signals import post_export, post_import from .tmp_storages import TempFolderStorage +logger = logging.getLogger(__name__) + class ImportExportMixinBase: + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.init_change_list_template() + + def init_change_list_template(self): + # Store already set change_list_template to allow users to 
independently + # customize the change list object tools. This treats the cases where + # `self.change_list_template` is `None` (the default in `ModelAdmin`) or + # where `self.import_export_change_list_template` is `None` as falling + # back on the default templates. + if getattr(self, "change_list_template", None): + self.ie_base_change_list_template = self.change_list_template + else: + self.ie_base_change_list_template = "admin/change_list.html" + + try: + self.change_list_template = getattr( + self, "import_export_change_list_template", None + ) + except AttributeError: + logger.warning("failed to assign change_list_template attribute") + + if self.change_list_template is None: + self.change_list_template = self.ie_base_change_list_template + def get_model_info(self): app_label = self.model._meta.app_label return (app_label, self.model._meta.model_name) + def changelist_view(self, request, extra_context=None): + extra_context = extra_context or {} + extra_context["ie_base_change_list_template"] = ( + self.ie_base_change_list_template + ) + return super().changelist_view(request, extra_context) + class ImportMixin(BaseImportMixin, ImportExportMixinBase): """ @@ -37,25 +75,35 @@ class ImportMixin(BaseImportMixin, ImportExportMixinBase): """ #: template for change_list view - change_list_template = 'admin/import_export/change_list_import.html' + import_export_change_list_template = "admin/import_export/change_list_import.html" #: template for import view - import_template_name = 'admin/import_export/import.html' + import_template_name = "admin/import_export/import.html" + #: form class to use for the initial import step + import_form_class = ImportForm + #: form class to use for the confirm import step + confirm_form_class = ConfirmImportForm #: import data encoding - from_encoding = "utf-8" + from_encoding = "utf-8-sig" + #: control which UI elements appear when import errors are displayed. 
+ #: Available options: 'message', 'row', 'traceback' + import_error_display = ("message",) + skip_admin_log = None # storage class for saving temporary files tmp_storage_class = None def get_skip_admin_log(self): if self.skip_admin_log is None: - return getattr(settings, 'IMPORT_EXPORT_SKIP_ADMIN_LOG', False) + return getattr(settings, "IMPORT_EXPORT_SKIP_ADMIN_LOG", False) else: return self.skip_admin_log def get_tmp_storage_class(self): if self.tmp_storage_class is None: tmp_storage_class = getattr( - settings, 'IMPORT_EXPORT_TMP_STORAGE_CLASS', TempFolderStorage, + settings, + "IMPORT_EXPORT_TMP_STORAGE_CLASS", + TempFolderStorage, ) else: tmp_storage_class = self.tmp_storage_class @@ -64,107 +112,161 @@ def get_tmp_storage_class(self): tmp_storage_class = import_string(tmp_storage_class) return tmp_storage_class + def get_tmp_storage_class_kwargs(self): + """Override this method to provide additional kwargs to temp storage class.""" + return {} + def has_import_permission(self, request): """ Returns whether a request has import permission. 
""" - IMPORT_PERMISSION_CODE = getattr(settings, 'IMPORT_EXPORT_IMPORT_PERMISSION_CODE', None) + IMPORT_PERMISSION_CODE = getattr( + settings, "IMPORT_EXPORT_IMPORT_PERMISSION_CODE", None + ) if IMPORT_PERMISSION_CODE is None: return True opts = self.opts codename = get_permission_codename(IMPORT_PERMISSION_CODE, opts) - return request.user.has_perm("%s.%s" % (opts.app_label, codename)) + return request.user.has_perm(f"{opts.app_label}.{codename}") def get_urls(self): urls = super().get_urls() info = self.get_model_info() my_urls = [ - path('process_import/', + path( + "process_import/", self.admin_site.admin_view(self.process_import), - name='%s_%s_process_import' % info), - path('import/', + name="%s_%s_process_import" % info, + ), + path( + "import/", self.admin_site.admin_view(self.import_action), - name='%s_%s_import' % info), + name="%s_%s_import" % info, + ), ] return my_urls + urls @method_decorator(require_POST) - def process_import(self, request, *args, **kwargs): + def process_import(self, request, **kwargs): """ Perform the actual import action (after the user has confirmed the import) """ if not self.has_import_permission(request): raise PermissionDenied - form_type = self.get_confirm_import_form() - confirm_form = form_type(request.POST) + confirm_form = self.create_confirm_form(request) if confirm_form.is_valid(): import_formats = self.get_import_formats() - input_format = import_formats[ - int(confirm_form.cleaned_data['input_format']) - ]() - tmp_storage = self.get_tmp_storage_class()(name=confirm_form.cleaned_data['import_file_name']) - data = tmp_storage.read(input_format.get_read_mode()) - if not input_format.is_binary() and self.from_encoding: - data = force_str(data, self.from_encoding) - dataset = input_format.create_dataset(data) + input_format = import_formats[int(confirm_form.cleaned_data["format"])]( + encoding=self.from_encoding + ) + encoding = None if input_format.is_binary() else self.from_encoding + tmp_storage_cls = 
self.get_tmp_storage_class() + tmp_storage = tmp_storage_cls( + name=confirm_form.cleaned_data["import_file_name"], + encoding=encoding, + read_mode=input_format.get_read_mode(), + **self.get_tmp_storage_class_kwargs(), + ) - result = self.process_dataset(dataset, confirm_form, request, *args, **kwargs) + data = tmp_storage.read() + dataset = input_format.create_dataset(data) + result = self.process_dataset(dataset, confirm_form, request, **kwargs) tmp_storage.remove() return self.process_result(result, request) - - def process_dataset(self, dataset, confirm_form, request, *args, **kwargs): - - res_kwargs = self.get_import_resource_kwargs(request, form=confirm_form, *args, **kwargs) - resource = self.get_import_resource_class()(**res_kwargs) - - imp_kwargs = self.get_import_data_kwargs(request, form=confirm_form, *args, **kwargs) - return resource.import_data(dataset, - dry_run=False, - raise_errors=True, - file_name=confirm_form.cleaned_data['original_file_name'], - user=request.user, - **imp_kwargs) + else: + context = self.admin_site.each_context(request) + context.update( + { + "title": _("Import"), + "confirm_form": confirm_form, + "opts": self.model._meta, + "errors": confirm_form.errors, + } + ) + return TemplateResponse(request, [self.import_template_name], context) + + def process_dataset( + self, + dataset, + form, + request, + **kwargs, + ): + res_kwargs = self.get_import_resource_kwargs(request, form=form, **kwargs) + resource = self.choose_import_resource_class(form, request)(**res_kwargs) + imp_kwargs = self.get_import_data_kwargs(request=request, form=form, **kwargs) + imp_kwargs["retain_instance_in_row_result"] = True + + return resource.import_data( + dataset, + dry_run=False, + file_name=form.cleaned_data.get("original_file_name"), + user=request.user, + **imp_kwargs, + ) def process_result(self, result, request): self.generate_log_entries(result, request) self.add_success_message(result, request) post_import.send(sender=None, model=self.model) - 
url = reverse('admin:%s_%s_changelist' % self.get_model_info(), - current_app=self.admin_site.name) + url = reverse( + "admin:%s_%s_changelist" % self.get_model_info(), + current_app=self.admin_site.name, + ) return HttpResponseRedirect(url) def generate_log_entries(self, result, request): if not self.get_skip_admin_log(): # Add imported objects to LogEntry - logentry_map = { - RowResult.IMPORT_TYPE_NEW: ADDITION, - RowResult.IMPORT_TYPE_UPDATE: CHANGE, - RowResult.IMPORT_TYPE_DELETE: DELETION, - } - content_type_id = ContentType.objects.get_for_model(self.model).pk - for row in result: - if row.import_type != row.IMPORT_TYPE_ERROR and row.import_type != row.IMPORT_TYPE_SKIP: - LogEntry.objects.log_action( - user_id=request.user.pk, - content_type_id=content_type_id, - object_id=row.object_id, - object_repr=row.object_repr, - action_flag=logentry_map[row.import_type], - change_message=_("%s through import_export" % row.import_type), - ) + if django.VERSION >= (5, 1): + self._log_actions(result, request) + else: + logentry_map = { + RowResult.IMPORT_TYPE_NEW: ADDITION, + RowResult.IMPORT_TYPE_UPDATE: CHANGE, + RowResult.IMPORT_TYPE_DELETE: DELETION, + } + content_type_id = ContentType.objects.get_for_model(self.model).pk + for row in result: + if row.import_type in logentry_map.keys(): + with warnings.catch_warnings(): + if django.VERSION >= (5,): + from django.utils.deprecation import ( + RemovedInDjango60Warning, + ) + + cat = RemovedInDjango60Warning + else: + cat = DeprecationWarning + warnings.simplefilter("ignore", category=cat) + LogEntry.objects.log_action( + user_id=request.user.pk, + content_type_id=content_type_id, + object_id=row.object_id, + object_repr=row.object_repr, + action_flag=logentry_map[row.import_type], + change_message=_( + "%s through import_export" % row.import_type + ), + ) def add_success_message(self, result, request): opts = self.model._meta - success_message = _('Import finished, with {} new and ' \ - '{} updated 
{}.').format(result.totals[RowResult.IMPORT_TYPE_NEW], - result.totals[RowResult.IMPORT_TYPE_UPDATE], - opts.verbose_name_plural) + success_message = _( + "Import finished: {} new, {} updated, {} deleted and {} skipped {}." + ).format( + result.totals[RowResult.IMPORT_TYPE_NEW], + result.totals[RowResult.IMPORT_TYPE_UPDATE], + result.totals[RowResult.IMPORT_TYPE_DELETE], + result.totals[RowResult.IMPORT_TYPE_SKIP], + opts.verbose_name_plural, + ) messages.success(request, success_message) @@ -174,58 +276,167 @@ def get_import_context_data(self, **kwargs): def get_context_data(self, **kwargs): return {} - def get_import_form(self): + def create_import_form(self, request): + """ + .. versionadded:: 3.0 + + Return a form instance to use for the 'initial' import step. + This method can be extended to make dynamic form updates to the + form after it has been instantiated. You might also look to + override the following: + + * :meth:`~import_export.admin.ImportMixin.get_import_form_class` + * :meth:`~import_export.admin.ImportMixin.get_import_form_kwargs` + * :meth:`~import_export.admin.ImportMixin.get_import_form_initial` + * :meth:`~import_export.mixins.BaseImportMixin.get_import_resource_classes` + """ + formats = self.get_import_formats() + form_class = self.get_import_form_class(request) + kwargs = self.get_import_form_kwargs(request) + + return form_class(formats, self.get_import_resource_classes(request), **kwargs) + + def get_import_form_class(self, request): + """ + .. versionadded:: 3.0 + + Return the form class to use for the 'import' step. If you only have + a single custom form class, you can set the ``import_form_class`` + attribute to change this for your subclass. + """ + return self.import_form_class + + def get_import_form_kwargs(self, request): + """ + .. 
versionadded:: 3.0 + + Return a dictionary of values with which to initialize the 'import' + form (including the initial values returned by + :meth:`~import_export.admin.ImportMixin.get_import_form_initial`). + """ + return { + "data": request.POST or None, + "files": request.FILES or None, + "initial": self.get_import_form_initial(request), + } + + def get_import_form_initial(self, request): + """ + .. versionadded:: 3.0 + + Return a dictionary of initial field values to be provided to the + 'import' form. + """ + return {} + + def create_confirm_form(self, request, import_form=None): """ - Get the form type used to read the import format and file. + .. versionadded:: 3.0 + + Return a form instance to use for the 'confirm' import step. + This method can be extended to make dynamic form updates to the + form after it has been instantiated. You might also look to + override the following: + + * :meth:`~import_export.admin.ImportMixin.get_confirm_form_class` + * :meth:`~import_export.admin.ImportMixin.get_confirm_form_kwargs` + * :meth:`~import_export.admin.ImportMixin.get_confirm_form_initial` """ - return ImportForm + form_class = self.get_confirm_form_class(request) + kwargs = self.get_confirm_form_kwargs(request, import_form) + return form_class(**kwargs) - def get_confirm_import_form(self): + def get_confirm_form_class(self, request): """ - Get the form type (class) used to confirm the import. + .. versionadded:: 3.0 + + Return the form class to use for the 'confirm' import step. If you only + have a single custom form class, you can set the ``confirm_form_class`` + attribute to change this for your subclass. """ - return ConfirmImportForm + return self.confirm_form_class - def get_form_kwargs(self, form, *args, **kwargs): + def get_confirm_form_kwargs(self, request, import_form=None): """ - Prepare/returns kwargs for the import form. + .. 
versionadded:: 3.0 - To distinguish between import and confirm import forms, - the following approach may be used: + Return a dictionary of values with which to initialize the 'confirm' + form (including the initial values returned by + :meth:`~import_export.admin.ImportMixin.get_confirm_form_initial`). + """ + if import_form: + # When initiated from `import_action()`, the 'posted' data + # is for the 'import' form, not this one. + data = None + files = None + else: + data = request.POST or None + files = request.FILES or None + + return { + "data": data, + "files": files, + "initial": self.get_confirm_form_initial(request, import_form), + } - if isinstance(form, ImportForm): - # your code here for the import form kwargs - # e.g. update.kwargs({...}) - elif isinstance(form, ConfirmImportForm): - # your code here for the confirm import form kwargs - # e.g. update.kwargs({...}) - ... + def get_confirm_form_initial(self, request, import_form): """ - return kwargs + .. versionadded:: 3.0 - def get_import_data_kwargs(self, request, *args, **kwargs): + Return a dictionary of initial field values to be provided to the + 'confirm' form. + """ + if import_form is None: + return {} + return { + "import_file_name": import_form.cleaned_data[ + "import_file" + ].tmp_storage_name, + "original_file_name": import_form.cleaned_data["import_file"].name, + "format": import_form.cleaned_data["format"], + "resource": import_form.cleaned_data.get("resource", ""), + } + + def get_import_data_kwargs(self, **kwargs): """ Prepare kwargs for import_data. 
""" - form = kwargs.get('form') + form = kwargs.get("form") if form: - kwargs.pop('form') + kwargs.pop("form") return kwargs - return {} + return kwargs def write_to_tmp_storage(self, import_file, input_format): - tmp_storage = self.get_tmp_storage_class()() - data = bytes() + encoding = None + if not input_format.is_binary(): + encoding = self.from_encoding + + tmp_storage_cls = self.get_tmp_storage_class() + tmp_storage = tmp_storage_cls( + encoding=encoding, + read_mode=input_format.get_read_mode(), + **self.get_tmp_storage_class_kwargs(), + ) + data = b"" for chunk in import_file.chunks(): data += chunk - tmp_storage.save(data, input_format.get_read_mode()) + tmp_storage.save(data) return tmp_storage - def import_action(self, request, *args, **kwargs): + def add_data_read_fail_error_to_form(self, form, e): + exc_name = repr(type(e).__name__) + msg = _( + "%(exc_name)s encountered while trying to read file. " + "Ensure you have chosen the correct format for the file." + ) % {"exc_name": exc_name} + form.add_error("import_file", msg) + + def import_action(self, request, **kwargs): """ Perform a dry_run of the import to make sure the import will not - result in errors. If there where no error, save the user + result in errors. If there are no errors, save the user uploaded file to a local temp file that will be used by 'process_import' for the actual import. 
""" @@ -235,99 +446,189 @@ def import_action(self, request, *args, **kwargs): context = self.get_import_context_data() import_formats = self.get_import_formats() - form_type = self.get_import_form() - form_kwargs = self.get_form_kwargs(form_type, *args, **kwargs) - form = form_type(import_formats, - request.POST or None, - request.FILES or None, - **form_kwargs) - - if request.POST and form.is_valid(): - input_format = import_formats[ - int(form.cleaned_data['input_format']) - ]() - import_file = form.cleaned_data['import_file'] - # first always write the uploaded file to disk as it may be a - # memory file or else based on settings upload handlers - tmp_storage = self.write_to_tmp_storage(import_file, input_format) - - # then read the file, using the proper format-specific mode - # warning, big files may exceed memory - try: - data = tmp_storage.read(input_format.get_read_mode()) - if not input_format.is_binary() and self.from_encoding: - data = force_str(data, self.from_encoding) - dataset = input_format.create_dataset(data) - except UnicodeDecodeError as e: - return HttpResponse(_(u"

Imported file has a wrong encoding: %s

" % e)) - except Exception as e: - return HttpResponse(_(u"

%s encountered while trying to read file: %s

" % (type(e).__name__, import_file.name))) - - # prepare kwargs for import data, if needed - res_kwargs = self.get_import_resource_kwargs(request, form=form, *args, **kwargs) - resource = self.get_import_resource_class()(**res_kwargs) - - # prepare additional kwargs for import_data, if needed - imp_kwargs = self.get_import_data_kwargs(request, form=form, *args, **kwargs) - result = resource.import_data(dataset, dry_run=True, - raise_errors=False, - file_name=import_file.name, - user=request.user, - **imp_kwargs) - - context['result'] = result - - if not result.has_errors() and not result.has_validation_errors(): - initial = { - 'import_file_name': tmp_storage.name, - 'original_file_name': import_file.name, - 'input_format': form.cleaned_data['input_format'], - } - confirm_form = self.get_confirm_import_form() - initial = self.get_form_kwargs(form=form, **initial) - context['confirm_form'] = confirm_form(initial=initial) + import_form = self.create_import_form(request) + resources = [] + if request.POST and import_form.is_valid(): + input_format = import_formats[int(import_form.cleaned_data["format"])]() + if not input_format.is_binary(): + input_format.encoding = self.from_encoding + import_file = import_form.cleaned_data["import_file"] + + if self.is_skip_import_confirm_enabled(): + # This setting means we are going to skip the import confirmation step. + # Go ahead and process the file for import in a transaction + # If there are any errors, we roll back the transaction. + # rollback_on_validation_errors is set to True so that we rollback on + # validation errors. If this is not done validation errors would be + # silently skipped. 
+ data = b"" + for chunk in import_file.chunks(): + data += chunk + try: + dataset = input_format.create_dataset(data) + except Exception as e: + self.add_data_read_fail_error_to_form(import_form, e) + if not import_form.errors: + result = self.process_dataset( + dataset, + import_form, + request, + raise_errors=False, + rollback_on_validation_errors=True, + **kwargs, + ) + if not result.has_errors() and not result.has_validation_errors(): + return self.process_result(result, request) + else: + context["result"] = result + else: + # first always write the uploaded file to disk as it may be a + # memory file or else based on settings upload handlers + tmp_storage = self.write_to_tmp_storage(import_file, input_format) + # allows get_confirm_form_initial() to include both the + # original and saved file names from form.cleaned_data + import_file.tmp_storage_name = tmp_storage.name + + try: + # then read the file, using the proper format-specific mode + # warning, big files may exceed memory + data = tmp_storage.read() + dataset = input_format.create_dataset(data) + except Exception as e: + self.add_data_read_fail_error_to_form(import_form, e) + else: + if len(dataset) == 0: + import_form.add_error( + "import_file", + _( + "No valid data to import. Ensure your file " + "has the correct headers or data for import." 
+ ), + ) + + if not import_form.errors: + # prepare kwargs for import data, if needed + res_kwargs = self.get_import_resource_kwargs( + request, form=import_form, **kwargs + ) + resource = self.choose_import_resource_class(import_form, request)( + **res_kwargs + ) + resources = [resource] + + # prepare additional kwargs for import_data, if needed + imp_kwargs = self.get_import_data_kwargs( + request=request, form=import_form, **kwargs + ) + result = resource.import_data( + dataset, + dry_run=True, + raise_errors=False, + file_name=import_file.name, + user=request.user, + **imp_kwargs, + ) + context["result"] = result + + if not result.has_errors() and not result.has_validation_errors(): + context["confirm_form"] = self.create_confirm_form( + request, import_form=import_form + ) else: - res_kwargs = self.get_import_resource_kwargs(request, form=form, *args, **kwargs) - resource = self.get_import_resource_class()(**res_kwargs) + res_kwargs = self.get_import_resource_kwargs( + request=request, form=import_form, **kwargs + ) + resource_classes = self.get_import_resource_classes(request) + resources = [ + resource_class(**res_kwargs) for resource_class in resource_classes + ] context.update(self.admin_site.each_context(request)) - context['title'] = _("Import") - context['form'] = form - context['opts'] = self.model._meta - context['fields'] = [f.column_name for f in resource.get_user_visible_fields()] + context["title"] = _("Import") + context["form"] = import_form + context["opts"] = self.model._meta + context["media"] = self.media + import_form.media + context["fields_list"] = [ + ( + resource.get_display_name(), + [f.column_name for f in resource.get_user_visible_fields()], + ) + for resource in resources + ] + context["import_error_display"] = self.import_error_display request.current_app = self.admin_site.name - return TemplateResponse(request, [self.import_template_name], - context) + return TemplateResponse(request, [self.import_template_name], context) def 
changelist_view(self, request, extra_context=None): if extra_context is None: extra_context = {} - extra_context['has_import_permission'] = self.has_import_permission(request) + extra_context["has_import_permission"] = self.has_import_permission(request) return super().changelist_view(request, extra_context) + def _log_actions(self, result, request): + """ + Create appropriate LogEntry instances for the result. + """ + rows = {} + for row in result: + rows.setdefault(row.import_type, []) + rows[row.import_type].append(row.instance) + + self._create_log_entries(request.user.pk, rows) + + def _create_log_entries(self, user_pk, rows): + logentry_map = { + RowResult.IMPORT_TYPE_NEW: ADDITION, + RowResult.IMPORT_TYPE_UPDATE: CHANGE, + RowResult.IMPORT_TYPE_DELETE: DELETION, + } + for import_type, instances in rows.items(): + if import_type in logentry_map.keys(): + action_flag = logentry_map[import_type] + self._create_log_entry( + user_pk, rows[import_type], import_type, action_flag + ) + + def _create_log_entry(self, user_pk, rows, import_type, action_flag): + if len(rows) > 0: + LogEntry.objects.log_actions( + user_pk, + rows, + action_flag, + change_message=_("%s through import_export" % import_type), + single_object=len(rows) == 1, + ) + class ExportMixin(BaseExportMixin, ImportExportMixinBase): """ Export mixin. - This is intended to be mixed with django.contrib.admin.ModelAdmin - https://docs.djangoproject.com/en/dev/ref/contrib/admin/ + This is intended to be mixed with + `ModelAdmin `_. """ + #: template for change_list view - change_list_template = 'admin/import_export/change_list_export.html' + import_export_change_list_template = "admin/import_export/change_list_export.html" #: template for export view - export_template_name = 'admin/import_export/export.html' + export_template_name = "admin/import_export/export.html" #: export data encoding to_encoding = None + #: Form class to use for the initial export step. 
+ #: Assign to :class:`~import_export.forms.ExportForm` if you would + #: like to disable selectable fields feature. + export_form_class = SelectableFieldsExportForm def get_urls(self): urls = super().get_urls() my_urls = [ - path('export/', + path( + "export/", self.admin_site.admin_view(self.export_action), - name='%s_%s_export' % self.get_model_info()), + name="%s_%s_export" % self.get_model_info(), + ), ] return my_urls + urls @@ -335,59 +636,85 @@ def has_export_permission(self, request): """ Returns whether a request has export permission. """ - EXPORT_PERMISSION_CODE = getattr(settings, 'IMPORT_EXPORT_EXPORT_PERMISSION_CODE', None) + EXPORT_PERMISSION_CODE = getattr( + settings, "IMPORT_EXPORT_EXPORT_PERMISSION_CODE", None + ) if EXPORT_PERMISSION_CODE is None: return True opts = self.opts codename = get_permission_codename(EXPORT_PERMISSION_CODE, opts) - return request.user.has_perm("%s.%s" % (opts.app_label, codename)) + return request.user.has_perm(f"{opts.app_label}.{codename}") def get_export_queryset(self, request): """ - Returns export queryset. + Returns export queryset. The queryset is obtained by calling + ModelAdmin + `get_queryset() + `_. Default implementation respects applied search and filters. 
""" list_display = self.get_list_display(request) list_display_links = self.get_list_display_links(request, list_display) + list_select_related = self.get_list_select_related(request) list_filter = self.get_list_filter(request) search_fields = self.get_search_fields(request) if self.get_actions(request): - list_display = ['action_checkbox'] + list(list_display) + list_display = ["action_checkbox"] + list(list_display) ChangeList = self.get_changelist(request) changelist_kwargs = { - 'request': request, - 'model': self.model, - 'list_display': list_display, - 'list_display_links': list_display_links, - 'list_filter': list_filter, - 'date_hierarchy': self.date_hierarchy, - 'search_fields': search_fields, - 'list_select_related': self.list_select_related, - 'list_per_page': self.list_per_page, - 'list_max_show_all': self.list_max_show_all, - 'list_editable': self.list_editable, - 'model_admin': self, + "request": request, + "model": self.model, + "list_display": list_display, + "list_display_links": list_display_links, + "list_filter": list_filter, + "date_hierarchy": self.date_hierarchy, + "search_fields": search_fields, + "list_select_related": list_select_related, + "list_per_page": self.list_per_page, + "list_max_show_all": self.list_max_show_all, + "list_editable": self.list_editable, + "model_admin": self, + "sortable_by": self.sortable_by, } - if django.VERSION >= (2, 1): - changelist_kwargs['sortable_by'] = self.sortable_by - if django.VERSION >= (4, 0): - changelist_kwargs['search_help_text'] = self.search_help_text - cl = ChangeList(**changelist_kwargs) - + changelist_kwargs["search_help_text"] = self.search_help_text + + class ExportChangeList(ChangeList): + def get_results(self, request): + """ + Overrides ChangeList.get_results() to bypass default operations like + pagination and result counting, which are not needed for export. This + prevents executing unnecessary COUNT queries during ChangeList + initialization. 
+ """ + pass + + cl = ExportChangeList(**changelist_kwargs) + + # get_queryset() is already called during initialization, + # it is enough to get its results + if hasattr(cl, "queryset"): + return cl.queryset + + # Fallback in case the ChangeList doesn't have queryset attribute set return cl.get_queryset(request) - def get_export_data(self, file_format, queryset, *args, **kwargs): + def get_export_data(self, file_format, request, queryset, **kwargs): """ Returns file_format representation for given queryset. """ - request = kwargs.pop("request") if not self.has_export_permission(request): raise PermissionDenied - data = self.get_data_for_export(request, queryset, *args, **kwargs) + force_native_type = type(file_format) in BINARY_FORMATS + data = self.get_data_for_export( + request, + queryset, + force_native_type=force_native_type, + **kwargs, + ) export_data = file_format.export_data(data) encoding = kwargs.get("encoding") if not file_format.is_binary() and encoding: @@ -400,55 +727,138 @@ def get_export_context_data(self, **kwargs): def get_context_data(self, **kwargs): return {} - def export_action(self, request, *args, **kwargs): + def get_export_form_class(self): + """ + Get the form class used to read the export format. + """ + return self.export_form_class + + def export_action(self, request): + """ + Handles the default workflow for both the export form and the + export of data to file. 
+ """ if not self.has_export_permission(request): raise PermissionDenied + form_type = self.get_export_form_class() formats = self.get_export_formats() - form = ExportForm(formats, request.POST or None) - if form.is_valid(): - file_format = formats[ - int(form.cleaned_data['file_format']) - ]() - - queryset = self.get_export_queryset(request) - export_data = self.get_export_data(file_format, queryset, request=request, encoding=self.to_encoding) - content_type = file_format.get_content_type() - response = HttpResponse(export_data, content_type=content_type) - response['Content-Disposition'] = 'attachment; filename="%s"' % ( - self.get_export_filename(request, queryset, file_format), + queryset = self.get_export_queryset(request) + if self.is_skip_export_form_enabled(): + return self._do_file_export(formats[0](), request, queryset) + + form = form_type( + formats, + self.get_export_resource_classes(request), + data=request.POST or None, + ) + if request.POST and "export_items" in request.POST: + # this field is instantiated if the export is POSTed from the + # 'action' drop down + form.fields["export_items"] = MultipleChoiceField( + widget=MultipleHiddenInput, + required=False, + choices=[(pk, pk) for pk in self.get_valid_export_item_pks(request)], ) + if form.is_valid(): + file_format = formats[int(form.cleaned_data["format"])]() - post_export.send(sender=None, model=self.model) - return response - - context = self.get_export_context_data() + if "export_items" in form.changed_data: + # this request has arisen from an Admin UI action + # export item pks are stored in form data + # so generate the queryset from the stored pks + queryset = queryset.filter(pk__in=form.cleaned_data["export_items"]) - context.update(self.admin_site.each_context(request)) + try: + return self._do_file_export( + file_format, request, queryset, export_form=form + ) + except (ValueError, FieldError) as e: + messages.error(request, str(e)) - context['title'] = _("Export") - context['form'] = 
form - context['opts'] = self.model._meta + context = self.init_request_context_data(request, form) request.current_app = self.admin_site.name - return TemplateResponse(request, [self.export_template_name], - context) + return TemplateResponse(request, [self.export_template_name], context=context) + + def get_valid_export_item_pks(self, request): + """ + DEPRECATED: This method is deprecated and will be removed in the future. + Overwrite get_queryset() or get_export_queryset() instead. + + Returns a list of valid pks for export. + This is used to validate which objects can be exported when exports are + triggered from the Admin UI 'action' dropdown. + This can be overridden to filter returned pks for performance and/or security + reasons. + + :param request: The request object. + :returns: a list of valid pks (by default is all pks in table). + """ + cls = self.__class__ + warnings.warn( + "The 'get_valid_export_item_pks()' method in " + f"{cls.__module__}.{cls.__qualname__} " + "is deprecated and will " + "be removed in a future release", + DeprecationWarning, + ) + return self.model.objects.all().values_list("pk", flat=True) def changelist_view(self, request, extra_context=None): if extra_context is None: extra_context = {} - extra_context['has_export_permission'] = self.has_export_permission(request) + extra_context["has_export_permission"] = self.has_export_permission(request) return super().changelist_view(request, extra_context) def get_export_filename(self, request, queryset, file_format): return super().get_export_filename(file_format) + def init_request_context_data(self, request, form): + context = self.get_export_context_data() + context.update(self.admin_site.each_context(request)) + context["title"] = _("Export") + context["form"] = form + context["opts"] = self.model._meta + context["fields_list"] = [ + ( + res.get_display_name(), + [ + field.column_name + for field in res( + **self.get_export_resource_kwargs(request) + ).get_user_visible_fields() + 
], + ) + for res in self.get_export_resource_classes(request) + ] + return context + + def _do_file_export(self, file_format, request, queryset, export_form=None): + export_data = self.get_export_data( + file_format, + request, + queryset, + encoding=self.to_encoding, + export_form=export_form, + ) + content_type = file_format.get_content_type() + response = HttpResponse(export_data, content_type=content_type) + response["Content-Disposition"] = 'attachment; filename="{}"'.format( + self.get_export_filename(request, queryset, file_format), + ) + post_export.send(sender=None, model=self.model) + return response + class ImportExportMixin(ImportMixin, ExportMixin): """ Import and export mixin. """ + #: template for change_list view - change_list_template = 'admin/import_export/change_list_import_export.html' + import_export_change_list_template = ( + "admin/import_export/change_list_import_export.html" + ) class ImportExportModelAdmin(ImportExportMixin, admin.ModelAdmin): @@ -462,64 +872,88 @@ class ExportActionMixin(ExportMixin): Mixin with export functionality implemented as an admin action. """ - # Don't use custom change list template. - change_list_template = None + #: template for change form + change_form_template = "admin/import_export/change_form.html" - def __init__(self, *args, **kwargs): - """ - Adds a custom action form initialized with the available export - formats. 
- """ - choices = [] - formats = self.get_export_formats() - if formats: - choices.append(('', '---')) - for i, f in enumerate(formats): - choices.append((str(i), f().get_title())) + #: Flag to indicate whether to show 'export' button on change form + show_change_form_export = True - self.action_form = export_action_form_factory(choices) - super().__init__(*args, **kwargs) + # This action will receive a selection of items as a queryset, + # store them in the context, and then render the 'export' admin form page, + # so that users can select file format and resource + + def change_view(self, request, object_id, form_url="", extra_context=None): + extra_context = extra_context or {} + extra_context["show_change_form_export"] = ( + self.show_change_form_export and self.has_export_permission(request) + ) + return super().change_view( + request, + object_id, + form_url, + extra_context=extra_context, + ) + + def response_change(self, request, obj): + # called if the export is triggered from the instance detail page. + if "_export-item" in request.POST: + return self.export_admin_action( + request, self.model.objects.filter(pk=obj.pk) + ) + return super().response_change(request, obj) def export_admin_action(self, request, queryset): """ - Exports the selected rows using file_format. + Action runs on POST from instance action menu (if enabled). 
""" - export_format = request.POST.get('file_format') + formats = self.get_export_formats() + if self.is_skip_export_form_from_action_enabled(): + file_format = formats[0]() + return self._do_file_export(file_format, request, queryset) - if not export_format: - messages.warning(request, _('You must select an export format.')) - else: - formats = self.get_export_formats() - file_format = formats[int(export_format)]() - - export_data = self.get_export_data(file_format, queryset, request=request, encoding=self.to_encoding) - content_type = file_format.get_content_type() - response = HttpResponse(export_data, content_type=content_type) - response['Content-Disposition'] = 'attachment; filename="%s"' % ( - self.get_export_filename(request, queryset, file_format), + form_type = self.get_export_form_class() + formats = self.get_export_formats() + export_items = list(queryset.values_list("pk", flat=True)) + form = form_type( + formats=formats, + resources=self.get_export_resource_classes(request), + initial={"export_items": export_items}, + ) + # selected items are to be stored as a hidden input on the form + form.fields["export_items"] = MultipleChoiceField( + widget=MultipleHiddenInput, required=False, choices=export_items + ) + context = self.init_request_context_data(request, form) + + # this is necessary to render the FORM action correctly + # i.e. so the POST goes to the correct URL + export_url = reverse( + "%s:%s_%s_export" + % ( + self.admin_site.name, + self.model._meta.app_label, + self.model._meta.model_name, ) - return response + ) + context["export_url"] = export_url + + return render(request, "admin/import_export/export.html", context=context) def get_actions(self, request): """ Adds the export action to the list of available actions. 
""" - actions = super().get_actions(request) - actions.update( - export_admin_action=( - ExportActionMixin.export_admin_action, - "export_admin_action", - _("Export selected %(verbose_name_plural)s"), + if self.has_export_permission(request): + actions.update( + export_admin_action=( + type(self).export_admin_action, + "export_admin_action", + _("Export selected %(verbose_name_plural)s"), + ) ) - ) return actions - @property - def media(self): - super_media = super().media - return forms.Media(js=super_media._js + ['import_export/action_formats.js'], css=super_media._css) - class ExportActionModelAdmin(ExportActionMixin, admin.ModelAdmin): """ diff --git a/import_export/command_utils.py b/import_export/command_utils.py new file mode 100644 index 000000000..a1c07aff8 --- /dev/null +++ b/import_export/command_utils.py @@ -0,0 +1,80 @@ +from django.apps import apps +from django.core.management.base import CommandError +from django.http.response import mimetypes +from django.utils.module_loading import import_string + +from import_export.formats.base_formats import DEFAULT_FORMATS +from import_export.resources import modelresource_factory + + +def get_resource_class(model_or_resource_class): + try: + # First, try to load it as a resource class + resource_class = import_string(model_or_resource_class) + return resource_class + except ImportError: + pass + + try: + if model_or_resource_class.count(".") == 1: + app_label, model_name = model_or_resource_class.split(".") + model = apps.get_model(app_label, model_name) + if model: + resource_class = modelresource_factory(model) + return resource_class + except LookupError: + pass + + raise CommandError( + f"Cannot import '{model_or_resource_class}' as a resource class or model." 
+ ) + + +MIME_TYPE_FORMAT_MAPPING = {format.CONTENT_TYPE: format for format in DEFAULT_FORMATS} + + +def get_format_class(format_name, file_name, encoding=None): + if format_name: + try: + # Direct import attempt + format_class = import_string(format_name) + except ImportError: + # Fallback to base_formats + fallback_format_name = f"import_export.formats.base_formats.{format_name}" + try: + format_class = import_string(fallback_format_name) + except ImportError: + # fallback to uppercase format name + try: + format_class = import_string( + f"import_export.formats.base_formats.{format_name.upper()}" + ) + except ImportError: + raise CommandError( + f"Cannot import '{format_name}' or '{fallback_format_name}'" + " format class." + ) + return format_class(encoding=encoding) + + else: + # Determine MIME type from file name + mimetype, file_encoding = mimetypes.guess_type(file_name) + + if not mimetype: + raise CommandError( + f"Cannot determine MIME type for '{file_name}'. " + " Please specify format with --format." + ) + + try: + format_class = MIME_TYPE_FORMAT_MAPPING[mimetype] + return format_class(encoding=encoding or file_encoding) + except KeyError: + raise CommandError( + f"Cannot find format for MIME type '{mimetype}'." + " Please specify format with --format." 
+ ) + + +def get_default_format_names(): + return ", ".join([f.__name__ for f in DEFAULT_FORMATS]) diff --git a/import_export/declarative.py b/import_export/declarative.py new file mode 100644 index 000000000..146cfdf96 --- /dev/null +++ b/import_export/declarative.py @@ -0,0 +1,171 @@ +import logging +import warnings +from collections import OrderedDict + +from django.apps import apps +from django.core.exceptions import FieldDoesNotExist +from django.db.models.fields.related import ForeignObjectRel + +from import_export.options import ResourceOptions + +from .fields import Field +from .instance_loaders import ModelInstanceLoader +from .utils import get_related_model + +logger = logging.getLogger(__name__) + + +class DeclarativeMetaclass(type): + def __new__(cls, name, bases, attrs): + def _load_meta_options(base_, meta_): + options = getattr(base_, "Meta", None) + + for option in [ + option + for option in dir(options) + if not option.startswith("_") and hasattr(options, option) + ]: + option_value = getattr(options, option) + if option == "model" and isinstance(option_value, str): + option_value = apps.get_model(option_value) + + setattr(meta_, option, option_value) + + declared_fields = [] + meta = ResourceOptions() + + # If this class is subclassing another Resource, add that Resource's + # fields. Note that we loop over the bases in *reverse*. This is + # necessary in order to preserve the correct order of fields. 
+ for base in bases[::-1]: + if hasattr(base, "fields"): + declared_fields = list(base.fields.items()) + declared_fields + # Collect the Meta options + # #1363 If there are any parent classes, set those options first + for parent in base.__bases__: + _load_meta_options(parent, meta) + _load_meta_options(base, meta) + + # Add direct fields + for field_name, obj in attrs.copy().items(): + if isinstance(obj, Field): + field = attrs.pop(field_name) + if not field.column_name: + field.column_name = field_name + declared_fields.append((field_name, field)) + + attrs["fields"] = OrderedDict(declared_fields) + new_class = super().__new__(cls, name, bases, attrs) + # add direct fields + _load_meta_options(new_class, meta) + new_class._meta = meta + + return new_class + + +class ModelDeclarativeMetaclass(DeclarativeMetaclass): + def __new__(cls, name, bases, attrs): + new_class = super().__new__(cls, name, bases, attrs) + + opts = new_class._meta + + if not opts.instance_loader_class: + opts.instance_loader_class = ModelInstanceLoader + + if opts.model: + model_opts = opts.model._meta + + # #1693 check the fields explicitly declared as attributes of the Resource + # class. + # if 'fields' property is defined, declared fields can only be included + # if they appear in the 'fields' iterable. 
+ declared_fields = {} + for field_name, field in new_class.fields.items(): + column_name = field.column_name + if ( + opts.fields is not None + and field_name not in opts.fields + and column_name not in opts.fields + ): + warnings.warn( + f"ignoring field '{field_name}' because not declared " + "in 'fields' whitelist", + stacklevel=2, + ) + continue + declared_fields[field_name] = field + + field_list = [] + for f in sorted(model_opts.fields + model_opts.many_to_many): + if opts.fields is not None and f.name not in opts.fields: + continue + if opts.exclude and f.name in opts.exclude: + continue + + if f.name in set(declared_fields.keys()): + # If model field is declared in `ModelResource`, + # remove it from `declared_fields` + # to keep exact order of model fields + field = declared_fields.pop(f.name) + else: + field = new_class.field_from_django_field(f.name, f, readonly=False) + + field_list.append( + ( + f.name, + field, + ) + ) + + # Order as model fields first then declared fields by default + new_class.fields = OrderedDict([*field_list, *declared_fields.items()]) + + # add fields that follow relationships + if opts.fields is not None: + field_list = [] + for field_name in opts.fields: + if field_name in declared_fields: + continue + if field_name.find("__") == -1: + continue + + model = opts.model + attrs = field_name.split("__") + for i, attr in enumerate(attrs): + verbose_path = ".".join( + [opts.model.__name__] + attrs[0 : i + 1] + ) + + try: + f = model._meta.get_field(attr) + except FieldDoesNotExist as e: + logger.debug(e, exc_info=e) + raise FieldDoesNotExist( + "%s: %s has no field named '%s'" + % (verbose_path, model.__name__, attr) + ) + + if i < len(attrs) - 1: + # We're not at the last attribute yet, so check + # that we're looking at a relation, and move on to + # the next model. 
+ if isinstance(f, ForeignObjectRel): + model = get_related_model(f) + else: + if get_related_model(f) is None: + raise KeyError( + "%s is not a relation" % verbose_path + ) + model = get_related_model(f) + + if isinstance(f, ForeignObjectRel): + f = f.field + + field = new_class.field_from_django_field( + field_name, f, readonly=True + ) + field_list.append((field_name, field)) + + new_class.fields.update(OrderedDict(field_list)) + + return new_class diff --git a/import_export/exceptions.py b/import_export/exceptions.py index 94a992e0c..c807fa3d8 100644 --- a/import_export/exceptions.py +++ b/import_export/exceptions.py @@ -1,8 +1,38 @@ class ImportExportError(Exception): """A generic exception for all others to extend.""" + pass class FieldError(ImportExportError): """Raised when a field encounters an error.""" + + pass + + +class WidgetError(ImportExportError): + """Raised when there is a misconfiguration with a Widget.""" + pass + + +class ImportError(ImportExportError): + def __init__(self, error, number=None, row=None): + """A wrapper for errors thrown from the import process. + + :param error: The underlying error that occurred. + :param number: The row number of the row containing the error (if obtainable). + :param row: The row containing the error (if obtainable). + """ + self.error = error + self.number = number + self.row = row + + def __str__(self): + s = "" + if self.number is not None: + s += f"{self.number}: " + s += f"{self.error}" + if self.row is not None: + s += f" ({self.row})" + return s diff --git a/import_export/fields.py b/import_export/fields.py index 36b81b996..0ad362516 100644 --- a/import_export/fields.py +++ b/import_export/fields.py @@ -3,35 +3,56 @@ from django.db.models.manager import Manager from . import widgets +from .exceptions import FieldError class Field: """ - Field represent mapping between `object` field and representation of - this field. 
+ ``Field`` represents a mapping between an ``instance`` field and a representation of + the field's data. - :param attribute: A string of either an instance attribute or callable off - the object. + :param attribute: A string of either an instance attribute or callable of + the instance. - :param column_name: Lets you provide a name for the column that represents + :param column_name: An optional column name for the column that represents this field in the export. :param widget: Defines a widget that will be used to represent this - field's data in the export. + field's data in the export, or transform the value during import. :param readonly: A Boolean which defines if this field will be ignored during import. :param default: This value will be returned by - :meth:`~import_export.fields.Field.clean` if this field's widget did - not return an adequate value. + :meth:`~import_export.fields.Field.clean` if this field's widget returned + a value defined in :attr:`~import_export.fields.empty_values`. - :param saves_null_values: Controls whether null values are saved on the object + :param saves_null_values: Controls whether null values are saved on the instance. + This can be used if the widget returns null, but there is a default instance + value which should not be overwritten. + + :param dehydrate_method: You can provide a `dehydrate_method` as a string to use + instead of the default `dehydrate_{field_name}` syntax, or you can provide + a callable that will be executed with the instance as its argument. + + :param m2m_add: changes save of this field to add the values, if they do not exist, + to a ManyToMany field instead of setting all values. Only useful if field is + a ManyToMany field. 
""" - empty_values = [None, ''] - def __init__(self, attribute=None, column_name=None, widget=None, - default=NOT_PROVIDED, readonly=False, saves_null_values=True): + empty_values = [None, ""] + + def __init__( + self, + attribute=None, + column_name=None, + widget=None, + default=NOT_PROVIDED, + readonly=False, + saves_null_values=True, + dehydrate_method=None, + m2m_add=False, + ): self.attribute = attribute self.default = default self.column_name = column_name @@ -40,30 +61,32 @@ def __init__(self, attribute=None, column_name=None, widget=None, self.widget = widget self.readonly = readonly self.saves_null_values = saves_null_values + self.dehydrate_method = dehydrate_method + self.m2m_add = m2m_add def __repr__(self): """ Displays the module, class and name of the field. """ - path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__) - column_name = getattr(self, 'column_name', None) - if column_name is not None: - return '<%s: %s>' % (path, column_name) - return '<%s>' % path + path = f"{self.__class__.__module__}.{self.__class__.__name__}" + if self.column_name is not None: + return f"<{path}: {self.column_name}>" + return "<%s>" % path - def clean(self, data, **kwargs): + def clean(self, row, **kwargs): """ Translates the value stored in the imported datasource to an appropriate Python object and returns it. """ try: - value = data[self.column_name] + value = row[self.column_name] except KeyError: - raise KeyError("Column '%s' not found in dataset. Available " - "columns are: %s" % (self.column_name, list(data))) + raise KeyError( + "Column '%s' not found in dataset. 
Available " + "columns are: %s" % (self.column_name, list(row)) + ) - # If ValueError is raised here, import_obj() will handle it - value = self.widget.clean(value, row=data, **kwargs) + value = self.widget.clean(value, row=row, **kwargs) if value in self.empty_values and self.default != NOT_PROVIDED: if callable(self.default): @@ -72,15 +95,22 @@ def clean(self, data, **kwargs): return value - def get_value(self, obj): + def get_value(self, instance): """ - Returns the value of the object's attribute. + Returns the value of the instance's attribute. """ + + # The objects of a queryset can be dictionaries if the values method is used. + if isinstance(instance, dict): + if self.attribute not in instance: + return None + return instance[self.attribute] + if self.attribute is None: return None - attrs = self.attribute.split('__') - value = obj + attrs = self.attribute.split("__") + value = instance for attr in attrs: try: @@ -98,28 +128,41 @@ def get_value(self, obj): value = value() return value - def save(self, obj, data, is_m2m=False, **kwargs): + def save(self, instance, row, is_m2m=False, **kwargs): """ - If this field is not declared readonly, the object's attribute will + If this field is not declared readonly, the instance's attribute will be set to the value returned by :meth:`~import_export.fields.Field.clean`. 
""" if not self.readonly: - attrs = self.attribute.split('__') + attrs = self.attribute.split("__") for attr in attrs[:-1]: - obj = getattr(obj, attr, None) - cleaned = self.clean(data, **kwargs) + instance = getattr(instance, attr, None) + cleaned = self.clean(row, **kwargs) if cleaned is not None or self.saves_null_values: if not is_m2m: - setattr(obj, attrs[-1], cleaned) + setattr(instance, attrs[-1], cleaned) else: - getattr(obj, attrs[-1]).set(cleaned) + if self.m2m_add: + getattr(instance, attrs[-1]).add(*cleaned) + else: + getattr(instance, attrs[-1]).set(cleaned) - def export(self, obj): + def export(self, instance, **kwargs): """ - Returns value from the provided object converted to export + Returns value from the provided instance converted to export representation. """ - value = self.get_value(obj) - if value is None: - return "" - return self.widget.render(value, obj) + value = self.get_value(instance) + return self.widget.render(value, **kwargs) + + def get_dehydrate_method(self, field_name=None): + """ + Returns method name to be used for dehydration of the field. + Defaults to `dehydrate_{field_name}` + """ + DEFAULT_DEHYDRATE_METHOD_PREFIX = "dehydrate_" + + if not self.dehydrate_method and not field_name: + raise FieldError("Both dehydrate_method and field_name are not supplied.") + + return self.dehydrate_method or DEFAULT_DEHYDRATE_METHOD_PREFIX + field_name diff --git a/import_export/formats/base_formats.py b/import_export/formats/base_formats.py index d6bc2461b..a0971da49 100644 --- a/import_export/formats/base_formats.py +++ b/import_export/formats/base_formats.py @@ -1,6 +1,16 @@ -from importlib import import_module +# when adding imports, ensure that they are local to the +# correct class for the file format. +# e.g. 
add openpyxl imports to the XLSXFormat class +# See issue 2004 +import logging +import warnings import tablib +from django.conf import settings +from django.utils.translation import gettext_lazy as _ +from tablib.formats import registry + +logger = logging.getLogger(__name__) class Format: @@ -29,7 +39,7 @@ def get_read_mode(self): """ Returns mode for opening files. """ - return 'rb' + return "rb" def get_extension(self): """ @@ -40,7 +50,7 @@ def get_extension(self): def get_content_type(self): # For content types see # https://www.iana.org/assignments/media-types/media-types.xhtml - return 'application/octet-stream' + return "application/octet-stream" @classmethod def is_available(cls): @@ -55,20 +65,19 @@ def can_export(self): class TablibFormat(Format): TABLIB_MODULE = None - CONTENT_TYPE = 'application/octet-stream' + CONTENT_TYPE = "application/octet-stream" + + def __init__(self, encoding=None): + self.encoding = encoding def get_format(self): """ Import and returns tablib module. 
""" - try: - # Available since tablib 1.0 - from tablib.formats import registry - except ImportError: - return import_module(self.TABLIB_MODULE) - else: - key = self.TABLIB_MODULE.split('.')[-1].replace('_', '') - return registry.get_format(key) + if not self.TABLIB_MODULE: + raise AttributeError("TABLIB_MODULE must be defined") + key = self.TABLIB_MODULE.split(".")[-1].replace("_", "") + return registry.get_format(key) @classmethod def is_available(cls): @@ -82,9 +91,11 @@ def get_title(self): return self.get_format().title def create_dataset(self, in_stream, **kwargs): - return tablib.import_set(in_stream, format=self.get_title()) + return tablib.import_set(in_stream, format=self.get_title(), **kwargs) def export_data(self, dataset, **kwargs): + if getattr(settings, "IMPORT_EXPORT_ESCAPE_FORMULAE_ON_EXPORT", False) is True: + self._escape_formulae(dataset) return dataset.export(self.get_title(), **kwargs) def get_extension(self): @@ -94,66 +105,75 @@ def get_content_type(self): return self.CONTENT_TYPE def can_import(self): - return hasattr(self.get_format(), 'import_set') + return hasattr(self.get_format(), "import_set") def can_export(self): - return hasattr(self.get_format(), 'export_set') + return hasattr(self.get_format(), "export_set") + + def _escape_formulae(self, dataset): + def _do_escape(s): + return s.replace("=", "", 1) if s.startswith("=") else s + + for r in dataset: + row = dataset.lpop() + row = [_do_escape(str(cell)) for cell in row] + dataset.append(row) class TextFormat(TablibFormat): + def create_dataset(self, in_stream, **kwargs): + if isinstance(in_stream, bytes) and self.encoding: + in_stream = in_stream.decode(self.encoding) + return super().create_dataset(in_stream, **kwargs) + def get_read_mode(self): - return 'r' + return "r" def is_binary(self): return False class CSV(TextFormat): - TABLIB_MODULE = 'tablib.formats._csv' - CONTENT_TYPE = 'text/csv' - - def create_dataset(self, in_stream, **kwargs): - return 
super().create_dataset(in_stream, **kwargs) + TABLIB_MODULE = "tablib.formats._csv" + CONTENT_TYPE = "text/csv" class JSON(TextFormat): - TABLIB_MODULE = 'tablib.formats._json' - CONTENT_TYPE = 'application/json' + TABLIB_MODULE = "tablib.formats._json" + CONTENT_TYPE = "application/json" class YAML(TextFormat): - TABLIB_MODULE = 'tablib.formats._yaml' + TABLIB_MODULE = "tablib.formats._yaml" # See https://stackoverflow.com/questions/332129/yaml-mime-type - CONTENT_TYPE = 'text/yaml' + CONTENT_TYPE = "text/yaml" class TSV(TextFormat): - TABLIB_MODULE = 'tablib.formats._tsv' - CONTENT_TYPE = 'text/tab-separated-values' - - def create_dataset(self, in_stream, **kwargs): - return super().create_dataset(in_stream, **kwargs) + TABLIB_MODULE = "tablib.formats._tsv" + CONTENT_TYPE = "text/tab-separated-values" class ODS(TextFormat): - TABLIB_MODULE = 'tablib.formats._ods' - CONTENT_TYPE = 'application/vnd.oasis.opendocument.spreadsheet' + TABLIB_MODULE = "tablib.formats._ods" + CONTENT_TYPE = "application/vnd.oasis.opendocument.spreadsheet" class HTML(TextFormat): - TABLIB_MODULE = 'tablib.formats._html' - CONTENT_TYPE = 'text/html' + TABLIB_MODULE = "tablib.formats._html" + CONTENT_TYPE = "text/html" class XLS(TablibFormat): - TABLIB_MODULE = 'tablib.formats._xls' - CONTENT_TYPE = 'application/vnd.ms-excel' + TABLIB_MODULE = "tablib.formats._xls" + CONTENT_TYPE = "application/vnd.ms-excel" def create_dataset(self, in_stream): """ Create dataset from first sheet. 
""" import xlrd + xls_book = xlrd.open_workbook(file_contents=in_stream) dataset = tablib.Dataset() sheet = xls_book.sheets()[0] @@ -165,8 +185,8 @@ def create_dataset(self, in_stream): class XLSX(TablibFormat): - TABLIB_MODULE = 'tablib.formats._xlsx' - CONTENT_TYPE = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + TABLIB_MODULE = "tablib.formats._xlsx" + CONTENT_TYPE = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" def create_dataset(self, in_stream): """ @@ -177,7 +197,9 @@ def create_dataset(self, in_stream): import openpyxl # 'data_only' means values are read from formula cells, not the formula itself - xlsx_book = openpyxl.load_workbook(BytesIO(in_stream), read_only=True, data_only=True) + xlsx_book = openpyxl.load_workbook( + BytesIO(in_stream), read_only=True, data_only=True + ) dataset = tablib.Dataset() sheet = xlsx_book.active @@ -186,21 +208,81 @@ def create_dataset(self, in_stream): rows = sheet.rows dataset.headers = [cell.value for cell in next(rows)] + ignore_blanks = getattr( + settings, "IMPORT_EXPORT_IMPORT_IGNORE_BLANK_LINES", False + ) for row in rows: row_values = [cell.value for cell in row] - dataset.append(row_values) + + if ignore_blanks: + # do not add empty rows to dataset + if not all(value is None for value in row_values): + dataset.append(row_values) + else: + dataset.append(row_values) return dataset + def export_data(self, dataset, **kwargs): + from openpyxl.utils.exceptions import IllegalCharacterError + + # #1698 temporary catch for deprecation warning in openpyxl + # this catch block must be removed when openpyxl updated + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + try: + return super().export_data(dataset, **kwargs) + except IllegalCharacterError as e: + if ( + getattr( + settings, "IMPORT_EXPORT_ESCAPE_ILLEGAL_CHARS_ON_EXPORT", False + ) + is True + ): + self._escape_illegal_chars(dataset) + return super().export_data(dataset, 
**kwargs) + logger.exception(e) + # not raising original error due to reflected xss risk + raise ValueError(_("export failed due to IllegalCharacterError")) + + def _escape_illegal_chars(self, dataset): + from openpyxl.cell.cell import ILLEGAL_CHARACTERS_RE + + def _do_escape(cell): + if type(cell) is str: + cell = ILLEGAL_CHARACTERS_RE.sub("\N{REPLACEMENT CHARACTER}", cell) + return cell + + for r in dataset: + row = dataset.lpop() + row = [_do_escape(cell) for cell in row] + dataset.append(row) + #: These are the default formats for import and export. Whether they can be #: used or not is depending on their implementation in the tablib library. -DEFAULT_FORMATS = [fmt for fmt in ( - CSV, - XLS, - XLSX, - TSV, - ODS, - JSON, - YAML, - HTML, -) if fmt.is_available()] +DEFAULT_FORMATS = [ + fmt + for fmt in ( + CSV, + XLS, + XLSX, + TSV, + ODS, + JSON, + YAML, + HTML, + ) + if fmt.is_available() +] + +#: These are the formats which support different data types (such as datetime +#: and numbers) for which `coerce_to_string` is to be set false dynamically. 
+BINARY_FORMATS = [ + fmt + for fmt in ( + XLS, + XLSX, + ODS, + ) + if fmt.is_available() +] diff --git a/import_export/forms.py b/import_export/forms.py index 76dbaf751..6b1809db1 100644 --- a/import_export/forms.py +++ b/import_export/forms.py @@ -1,69 +1,273 @@ import os.path +from collections.abc import Iterable +from copy import deepcopy +from itertools import chain from django import forms -from django.contrib.admin.helpers import ActionForm +from django.conf import settings from django.utils.translation import gettext_lazy as _ +from .resources import ModelResource -class ImportForm(forms.Form): - import_file = forms.FileField( - label=_('File to import') - ) - input_format = forms.ChoiceField( - label=_('Format'), + +class ImportExportFormBase(forms.Form): + resource = forms.ChoiceField( + label=_("Resource"), choices=(), - ) + required=False, + ) + format = forms.ChoiceField( + label=_("Format"), + choices=(), + ) - def __init__(self, import_formats, *args, **kwargs): - super().__init__(*args, **kwargs) - choices = [] - for i, f in enumerate(import_formats): - choices.append((str(i), f().get_title(),)) - if len(import_formats) > 1: - choices.insert(0, ('', '---')) + def __init__(self, formats, resources, **kwargs): + super().__init__(**kwargs) + self._init_resources(resources) + self._init_formats(formats) - self.fields['input_format'].choices = choices + def _init_resources(self, resources): + if not resources: + raise ValueError("no defined resources") + self.fields["resource"].choices = [ + (i, resource.get_display_name()) for i, resource in enumerate(resources) + ] + if len(resources) == 1: + self.fields["resource"].widget = forms.HiddenInput() + self.initial["resource"] = "0" + + def _init_formats(self, formats): + if not formats: + raise ValueError("invalid formats list") + + choices = [(str(i), f().get_title()) for i, f in enumerate(formats)] + if len(formats) == 1: + field = self.fields["format"] + field.value = formats[0]().get_title() + 
field.initial = 0 + field.widget.attrs["readonly"] = True + if len(formats) > 1: + choices.insert(0, ("", "---")) + + self.fields["format"].choices = choices + + +class ImportForm(ImportExportFormBase): + import_file = forms.FileField(label=_("File to import")) + + # field ordered for usability: + # ensure that the 'file' select appears before 'format' + # so that the 'guess_format' js logic makes sense + field_order = ["resource", "import_file", "format"] + + def __init__(self, formats, resources, **kwargs): + super().__init__(formats, resources, **kwargs) + if len(formats) > 1: + self.fields["import_file"].widget.attrs["class"] = "guess_format" + self.fields["format"].widget.attrs["class"] = "guess_format" + + @property + def media(self): + media = super().media + extra = "" if settings.DEBUG else ".min" + return media + forms.Media( + js=( + f"admin/js/vendor/jquery/jquery{extra}.js", + "admin/js/jquery.init.js", + "import_export/guess_format.js", + ) + ) class ConfirmImportForm(forms.Form): import_file_name = forms.CharField(widget=forms.HiddenInput()) original_file_name = forms.CharField(widget=forms.HiddenInput()) - input_format = forms.CharField(widget=forms.HiddenInput()) + format = forms.CharField(widget=forms.HiddenInput()) + resource = forms.CharField(widget=forms.HiddenInput(), required=False) def clean_import_file_name(self): - data = self.cleaned_data['import_file_name'] + data = self.cleaned_data["import_file_name"] data = os.path.basename(data) return data -class ExportForm(forms.Form): - file_format = forms.ChoiceField( - label=_('Format'), - choices=(), +class ExportForm(ImportExportFormBase): + export_items = forms.MultipleChoiceField( + widget=forms.MultipleHiddenInput(), required=False + ) + + +class SelectableFieldsExportForm(ExportForm): + def __init__(self, formats, resources, **kwargs): + super().__init__(formats, resources, **kwargs) + self._init_selectable_fields(resources) + + @property + def media(self): + media = super().media + return 
media + forms.Media( + js=("import_export/export_selectable_fields.js",), + css={ + "all": ["import_export/export.css"], + }, ) - def __init__(self, formats, *args, **kwargs): - super().__init__(*args, **kwargs) - choices = [] - for i, f in enumerate(formats): - choices.append((str(i), f().get_title(),)) - if len(formats) > 1: - choices.insert(0, ('', '---')) + def _init_selectable_fields(self, resources: Iterable[ModelResource]) -> None: + """ + Create `BooleanField(s)` for resource fields + """ + self.resources = resources + self.is_selectable_fields_form = True + self.resource_fields = {resource.__name__: [] for resource in resources} + + for index, resource in enumerate(resources): + boolean_fields = self._create_boolean_fields(resource, index) + self.resource_fields[resource.__name__] = boolean_fields + + # Order fields by resource select then boolean fields + ordered_fields = [ + "resource", + # flatten resource fields lists + *chain(*self.resource_fields.values()), + ] + self.order_fields(ordered_fields) + + def _get_field_label(self, resource: ModelResource, field_name: str) -> str: + title = field_name.replace("_", " ").title() + field = resource.fields.get(field_name) + if field and field.column_name != field_name: + title = f"{title} ({field.column_name})" + return title + + def _create_boolean_fields(self, resource: ModelResource, index: int) -> None: + # Initiate resource to get ordered export fields + fields = resource().get_export_order() + boolean_fields = [] # will be used for ordering the fields + is_initial_field = False + + for field in fields: + field_name = self.create_boolean_field_name(resource, field) + boolean_field = forms.BooleanField( + label=self._get_field_label(resource, field), + label_suffix="", + initial=True, + required=False, + ) + + # These attributes will be used for rendering in template + boolean_field.is_selectable_field = True + boolean_field.resource_name = resource.__name__ + boolean_field.resource_index = index + 
boolean_field.widget.attrs["resource-id"] = index + if is_initial_field is False: + boolean_field.initial_field = is_initial_field = True + + self.fields[field_name] = boolean_field + boolean_fields.append(field_name) + + return boolean_fields + + @staticmethod + def create_boolean_field_name(resource: ModelResource, field_name: str) -> str: + """ + Create field name by combining `resource_name` + `field_name` to prevent + conflict between resource fields with same name + + Example: + BookResource + name -> bookresource_name + BookResourceWithNames + name -> bookresourcewithnames_name + """ + return resource.__name__.lower() + "_" + field_name + + def clean(self): + selected_resource = self.get_selected_resource() + + if selected_resource: + # Remove fields for not selected resources + self._remove_unselected_resource_fields(selected_resource) + # Normalize resource field names + self._normalize_resource_fields(selected_resource) + # Validate at least one field is selected for selected resource + self._validate_any_field_selected(selected_resource) + + return self.cleaned_data + + def _remove_unselected_resource_fields( + self, selected_resource: ModelResource + ) -> None: + """ + Remove boolean fields except the fields for selected resource + """ + _cleaned_data = deepcopy(self.cleaned_data) + + for resource_name, fields in self.resource_fields.items(): + if selected_resource.__name__ == resource_name: + # Skip selected resource + continue + + for field in fields: + del _cleaned_data[field] + + self.cleaned_data = _cleaned_data + + def get_selected_resource(self): + if not getattr(self, "cleaned_data", None): + raise forms.ValidationError( + _("Form is not validated, call `is_valid` first") + ) + + # Return selected resource by index + resource_index = 0 + if "resource" in self.cleaned_data: + try: + resource_index = int(self.cleaned_data["resource"]) + except ValueError: + pass + return self.resources[resource_index] + + def _normalize_resource_fields(self, 
selected_resource: ModelResource) -> None: + """ + Field names are combination of resource_name + field_name, + normalize field names by removing resource name + """ + selected_resource_name = selected_resource.__name__.lower() + "_" + _cleaned_data = {} + self._selected_resource_fields = [] + + for k, v in self.cleaned_data.items(): + if selected_resource_name in k: + field_name = k.replace(selected_resource_name, "") + _cleaned_data[field_name] = v + if v is True: + # Add to _selected_resource_fields to determine what + # fields were selected for export + self._selected_resource_fields.append(field_name) + continue + _cleaned_data[k] = v - self.fields['file_format'].choices = choices + self.cleaned_data = _cleaned_data + def get_selected_resource_export_fields(self): + selected_resource = self.get_selected_resource() + # Initialize resource to use `get_export_order` method + resource_fields = selected_resource().get_export_order() + return [ + field + for field, value in self.cleaned_data.items() + if field in resource_fields and value is True + ] -def export_action_form_factory(formats): - """ - Returns an ActionForm subclass containing a ChoiceField populated with - the given formats. - """ - class _ExportActionForm(ActionForm): + def _validate_any_field_selected(self, resource) -> None: """ - Action form with export format ChoiceField. 
+ Validate if any field for resource was selected in form data """ - file_format = forms.ChoiceField( - label=_('Format'), choices=formats, required=False) - _ExportActionForm.__name__ = str('ExportActionForm') + resource_fields = list(resource().get_export_order()) - return _ExportActionForm + if not any(v for k, v in self.cleaned_data.items() if k in resource_fields): + raise forms.ValidationError( + _("""Select at least 1 field for "%(resource_name)s" to export"""), + code="invalid", + params={ + "resource_name": resource.get_display_name(), + }, + ) diff --git a/import_export/instance_loaders.py b/import_export/instance_loaders.py index 96669c80f..68e0480c1 100644 --- a/import_export/instance_loaders.py +++ b/import_export/instance_loaders.py @@ -55,13 +55,10 @@ def __init__(self, *args, **kwargs): self.all_instances = {} if self.dataset.dict and self.pk_field.column_name in self.dataset.dict[0]: ids = [self.pk_field.clean(row) for row in self.dataset.dict] - qs = self.get_queryset().filter(**{ - "%s__in" % self.pk_field.attribute: ids - }) + qs = self.get_queryset().filter(**{"%s__in" % self.pk_field.attribute: ids}) self.all_instances = { - self.pk_field.get_value(instance): instance - for instance in qs + self.pk_field.get_value(instance): instance for instance in qs } def get_instance(self, row): diff --git a/import_export/locale/ar/LC_MESSAGES/django.mo b/import_export/locale/ar/LC_MESSAGES/django.mo index e99d951c2..c8702f007 100644 Binary files a/import_export/locale/ar/LC_MESSAGES/django.mo and b/import_export/locale/ar/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/ar/LC_MESSAGES/django.po b/import_export/locale/ar/LC_MESSAGES/django.po index c838330c2..25c049bb7 100644 --- a/import_export/locale/ar/LC_MESSAGES/django.po +++ b/import_export/locale/ar/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" 
+"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -19,62 +19,116 @@ msgstr "" "Plural-Forms: nplurals=6; plural=n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 " "&& n%100<=10 ? 3 : n%100>=11 && n%100<=99 ? 4 : 5;\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "إستيراد" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "" -#: admin.py:262 -#, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

الملف المستورد لديه ترميز خاطئ: %s

" - -#: admin.py:264 +#: admin.py #, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s ووجهت أثناء محاولة قراءة ملف: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "إستبراد" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "تصدير" -#: admin.py:490 -msgid "You must select an export format." -msgstr "يجب تحديد تنسيق التصدير." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "تصدير %(verbose_name_plural)s المحددة" -#: forms.py:10 -msgid "File to import" -msgstr "ملف للإستيراد" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py +msgid "Resource" +msgstr "" + +#: forms.py msgid "Format" msgstr "تنسيق" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "ملف للإستيراد" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" 
+msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "الرئيسية" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" +msgstr[2] "" +msgstr[3] "" +msgstr[4] "" +msgstr[5] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "هذا المستورد سوف يستورد الحقول التالية : " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "إرسال" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -82,57 +136,67 @@ msgstr "" "فيما يلي إستعراض للبيانات التي سيتم إستيرادها. 
إذا كنت راضيا عن النتائج, " "انقر على 'تأكيد الإستيراد'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "تأكيد الإستيراد" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "هذا المستورد سوف يستورد الحقول التالية : " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "أخطاء" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "رقم الصطر" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "معاينة" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "جديد" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "تجاهل" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "حذف" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "تحديث" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "هذا المستورد سوف يستورد الحقول التالية : " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "يجب تحديد تنسيق التصدير." 
diff --git a/import_export/locale/bg/LC_MESSAGES/django.mo b/import_export/locale/bg/LC_MESSAGES/django.mo index a608a390f..fb06569b6 100644 Binary files a/import_export/locale/bg/LC_MESSAGES/django.mo and b/import_export/locale/bg/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/bg/LC_MESSAGES/django.po b/import_export/locale/bg/LC_MESSAGES/django.po index 65cc16edd..1b7c135ac 100644 --- a/import_export/locale/bg/LC_MESSAGES/django.po +++ b/import_export/locale/bg/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: Hristo Gatsinski \n" "Language-Team: LANGUAGE \n" @@ -18,62 +18,114 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Импортиране" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s чрез import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "Импортирането е завършено, с {} нови и {} обновени {}." -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

Импортирания файл има грешна кодировка: %s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s при опит за четене на файл: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Импортиране" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Експортиране" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Трябва да изберете формат за експортиране." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Експортиране на избраните %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "Файл за импортиране" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Формат" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Файл за импортиране" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format 
+msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Начало" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Ще бъдат импортирани следните полета: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Изпълни" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,57 +133,67 @@ msgstr "" "Отдолу виждате преглед на данните за импортиране. 
Ако сте доволни от " "резултата, изберете 'Потвърди импортирането'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Потвърди импортирането" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Ще бъдат импортирани следните полета: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Грешки" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Номер на реда" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Преглед" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Нов" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Пропуснат" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Изтрит" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Обновен" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Ще бъдат импортирани следните полета: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Трябва да изберете формат за експортиране." 
diff --git a/import_export/locale/ca/LC_MESSAGES/django.mo b/import_export/locale/ca/LC_MESSAGES/django.mo index 2f847ecb5..b110ede80 100644 Binary files a/import_export/locale/ca/LC_MESSAGES/django.mo and b/import_export/locale/ca/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/ca/LC_MESSAGES/django.po b/import_export/locale/ca/LC_MESSAGES/django.po index 91c51c6cd..7fc2e178a 100644 --- a/import_export/locale/ca/LC_MESSAGES/django.po +++ b/import_export/locale/ca/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -18,62 +18,112 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importar" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

L'arxiu importat té una codificació incorrecta: %s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

S'ha trobat: %s mentre es llegia l'arxiu: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importar" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Exportar" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Heu de seleccionar un format d'exportació" - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Exportar %(verbose_name_plural)s seleccionats" -#: forms.py:10 -msgid "File to import" -msgstr "Arxiu a importar" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Format" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Arxiu a importar" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to 
force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Inici" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Aquest importador importarà els següents camps: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Enviar" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,60 +131,67 @@ msgstr "" "A continuació podeu veure una vista prèvia de les dades que s'importaran. 
Si " "esteu satisfets amb els resultats, premeu 'Confirmar importació'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Confirmar importació" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Aquest importador importarà els següents camps: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Errors" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Número de línia" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Vista prèvia" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nou" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Omès" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Esborrar" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Actualitzar" -#~ msgid "Import finished" -#~ msgstr "Importació finalitzada" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Aquest importador importarà els següents camps: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." 
+#~ msgstr "Heu de seleccionar un format d'exportació" diff --git a/import_export/locale/cs/LC_MESSAGES/django.mo b/import_export/locale/cs/LC_MESSAGES/django.mo index 26dfa57f5..694a92e56 100644 Binary files a/import_export/locale/cs/LC_MESSAGES/django.mo and b/import_export/locale/cs/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/cs/LC_MESSAGES/django.po b/import_export/locale/cs/LC_MESSAGES/django.po index 1335b99a7..4d041d1a7 100644 --- a/import_export/locale/cs/LC_MESSAGES/django.po +++ b/import_export/locale/cs/LC_MESSAGES/django.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: \n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: 2017-05-02 19:17+0200\n" "Last-Translator: \n" "Language-Team: \n" @@ -18,62 +18,115 @@ msgstr "" "Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n" "X-Generator: Poedit 2.0.1\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Import" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s skrz import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "Import dokončen, {} nové a {} aktualizované {}." -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

Importovaný soubor má nesprávné kódování: %s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

Při zpracování souboru nastala chyba %s (soubor %s)

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Import" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Export" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Musíte vybrat formát pro export." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Vybrán export %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "Soubor k importu" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Formát" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Soubor k importu" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance 
failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Domů" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" +msgstr[2] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Budou importována následující pole: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Odeslat" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,57 +134,67 @@ msgstr "" "Níže je zobrazen náhled importovaných dat. 
Pokud je vše v pořádku, stiskněte " "tlačítko „Provést import”" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Provést import" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Budou importována následující pole: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Chyby" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Číslo řádku" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Náhled" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nové" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Přeskočené" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Smazání" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Aktualizace" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Budou importována následující pole: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Musíte vybrat formát pro export." diff --git a/import_export/locale/de/LC_MESSAGES/django.mo b/import_export/locale/de/LC_MESSAGES/django.mo index b48b58329..aecbf3307 100644 Binary files a/import_export/locale/de/LC_MESSAGES/django.mo and b/import_export/locale/de/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/de/LC_MESSAGES/django.po b/import_export/locale/de/LC_MESSAGES/django.po index 1309d136b..36969f81e 100644 --- a/import_export/locale/de/LC_MESSAGES/django.po +++ b/import_export/locale/de/LC_MESSAGES/django.po @@ -1,81 +1,135 @@ -# SOME DESCRIPTIVE TITLE. 
-# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the PACKAGE package. -# FIRST AUTHOR , YEAR. +# David Glenck , 2024. # msgid "" msgstr "" "Project-Id-Version: \n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" -"PO-Revision-Date: 2016-02-01 17:33+0100\n" -"Last-Translator: Jannis \n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" +"PO-Revision-Date: 2022-10-17 17:42+0200\n" +"Last-Translator: Jannes Blobel \n" "Language-Team: \n" "Language: de\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" -"X-Generator: Poedit 1.8.4\n" +"X-Generator: Poedit 3.1.1\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importieren" + +#: admin.py #, python-format msgid "%s through import_export" -msgstr "" +msgstr "%s durch import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "" +"Import fertiggestellt: {} neue, {} aktualisierte, {} gelöschte und {} " +"übersprungene {}." -#: admin.py:262 -#, fuzzy, python-format -#| msgid "

Imported file is not in unicode: %s

" -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

Importierte Datei hat die falsche Zeichenkodierung: %s

" - -#: admin.py:264 -#, fuzzy, python-format -#| msgid "

%s encountred while trying to read file: %s

" -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s trat auf beim Versuch die Datei zu lesen: %s

" +#: admin.py +#, python-format +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" +"%(exc_name)s trat auf, beim Versuch, die Datei zu lesen. Stelle sicher, dass " +"du das richtige Format für die Datei gewählt hast." -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importieren" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" +"Keine gültigen Daten für den Import. Stelle sicher, dass deine Datei die " +"korrektenKopfzeilen und Daten für den Import hat." -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Exportieren" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Es muss ein Exportformat ausgewählt werden." 
- -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Ausgewählte %(verbose_name_plural)s exportieren" -#: forms.py:10 -msgid "File to import" -msgstr "Zu importierende Datei" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "Export schlug fehl wegen IllegalCharacterError" + +#: forms.py +msgid "Resource" +msgstr "Ressource" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Dateiformat" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Zu importierende Datei" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "Formular ist nicht validiert, führe zuerst `is_valid` aus" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "Wähle mindestens 1 Feld für \"%(resource_name)s\" zum Exportieren aus" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" +"Die folgenden Felder sind in 'import_id_fields' deklariert, sind aber keine " +"Felder der Ressource: %s" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" +"Die folgenden Felder sind in 'import_id_fields' deklariert, aber nicht in " +"der Kopfzeile der Datei vorhanden: %s" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "Aufruf von force_str() in der Instanz schlug fehl: %s" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Start" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." 
+msgstr[0] "Exportiere %(len)s ausgewähltes Element." +msgstr[1] "Exportiere %(len)s ausgewählte Elemente." + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +msgid "This exporter will export the following fields: " +msgstr "Es werden die folgenden Felder exportiert: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Absenden" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -83,60 +137,66 @@ msgstr "" "Unten befindet sich eine Vorschau der zu importierenden Daten. Wenn die " "Ergebnisse zufriedenstellend sind, klicke auf \"Import bestätigen\"." -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Import bestätigen" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Es werden die folgenden Felder importiert:" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Fehler" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Zeilennummer" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" -msgstr "" +msgstr "Die Validierung einiger Zeilen schlug fehl" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "" +"Bitte korrigiere falls möglich diese Fehler in deiner Datei und lade sie " +"anschließend erneut mit dem obigen Formular hoch." -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" -msgstr "" +msgstr "Zeile" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" -msgstr "" +msgstr "Nicht feldspezifisch" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Vorschau" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Neu" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Übersprungen" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Löschen" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" -msgstr "Überschreiben" +msgstr "Ändern" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Es werden die folgenden Felder importiert: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "Wert konnte nicht eingelesen werden." -#~ msgid "Import finished" -#~ msgstr "Import fertiggestellt." 
+#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "use_natural_foreign_keys und key_is_id können nicht beide True sein" diff --git a/import_export/locale/es/LC_MESSAGES/django.mo b/import_export/locale/es/LC_MESSAGES/django.mo index 8d90b8ffb..6c7959dec 100644 Binary files a/import_export/locale/es/LC_MESSAGES/django.mo and b/import_export/locale/es/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/es/LC_MESSAGES/django.po b/import_export/locale/es/LC_MESSAGES/django.po index 901878e3f..606c53e48 100644 --- a/import_export/locale/es/LC_MESSAGES/django.po +++ b/import_export/locale/es/LC_MESSAGES/django.po @@ -2,78 +2,133 @@ # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the PACKAGE package. # David Díaz , 2015. +# Santiago Muñoz , 2023. # #, fuzzy msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"Language: \n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" +"PO-Revision-Date: 2023-09-22 11:53-0300\n" +"Last-Translator: Santiago Muñoz \n" +"Language-Team: Spanish\n" +"Language: es\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importar" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." -msgstr "" +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." 
+msgstr "Proceso de importación finalizado, con {} nuevos y {} actualizados" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." msgstr "" +"Se encontró %(exc_name)s mientras se intentaba leer el archivo. Asegúrese " +"que seleccionó el formato correcto para el archivo." -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importar" - -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Exportar" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Debes seleccionar un formato de exportación." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Exportar %(verbose_name_plural)s seleccionados" -#: forms.py:10 -msgid "File to import" -msgstr "Fichero a importar" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "Recurso" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Formato" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Fichero a importar" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Inicio" -#: 
templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Este importador importará los siguientes campos:" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Enviar" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,60 +136,70 @@ msgstr "" "A continuación se muestra una vista previa de los datos a importar. 
Si estás " "satisfecho con los resultados, haz clic en 'Confirmar importación'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Confirmar importación" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Este importador importará los siguientes campos:" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Errores" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Número de línea" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" -msgstr "" +msgstr "Falló la validación de algunas filas" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." msgstr "" +"Por favor corrija los siguientes errores en la información ingresada donde " +"sea posible, luego vuelva a subir el archivo utilizando el formulario de la " +"parte superior." 
-#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" -msgstr "" +msgstr "Fila" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" -msgstr "" +msgstr "No específico del campo" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Vista previa" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nuevo" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Omitido" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Borrar" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Actualizar" -#~ msgid "Import finished" -#~ msgstr "Importación finalizada" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Este importador importará los siguientes campos:" + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Debes seleccionar un formato de exportación." 
diff --git a/import_export/locale/es_AR/LC_MESSAGES/django.mo b/import_export/locale/es_AR/LC_MESSAGES/django.mo index 2d96ec0ba..56418f936 100644 Binary files a/import_export/locale/es_AR/LC_MESSAGES/django.mo and b/import_export/locale/es_AR/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/es_AR/LC_MESSAGES/django.po b/import_export/locale/es_AR/LC_MESSAGES/django.po index 0c94c3514..78b445ff8 100644 --- a/import_export/locale/es_AR/LC_MESSAGES/django.po +++ b/import_export/locale/es_AR/LC_MESSAGES/django.po @@ -2,14 +2,15 @@ # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the PACKAGE package. # Gonzalo Bustos, 2015. +# Santiago Muñoz , 2023. # msgid "" msgstr "" "Project-Id-Version: \n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" -"PO-Revision-Date: 2015-10-11 18:49-0300\n" -"Last-Translator: Gonzalo Bustos\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" +"PO-Revision-Date: 2023-09-22 11:53-0300\n" +"Last-Translator: Santiago Muñoz \n" "Language-Team: Spanish (Argentina)\n" "Language: es_AR\n" "MIME-Version: 1.0\n" @@ -18,62 +19,116 @@ msgstr "" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Generator: Poedit 1.6.10\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importar" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." -msgstr "" +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." +msgstr "Proceso de importación finalizado, con {} nuevos y {} actualizados" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." msgstr "" +"Se encontró %(exc_name)s mientras se intentaba leer el archivo. Asegúrese " +"que seleccionó el formato correcto para el archivo." -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importar" - -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Exportar" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Debe seleccionar un formato de exportación." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Exportar %(verbose_name_plural)s seleccionados" -#: forms.py:10 -msgid "File to import" -msgstr "Archivo a importar" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "Recurso" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Formato" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Archivo a importar" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Inicio" -#: 
templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Este importador importará los siguientes campos:" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Enviar" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,60 +136,70 @@ msgstr "" "A continuación se muestra una vista previa de los datos a importar. 
Si está " "satisfecho con los resultados, haga clic en 'Confirmar importación'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Confirmar importación" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Este importador importará los siguientes campos:" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Errores" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Número de línea" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" -msgstr "" +msgstr "Falló la validación de algunas filas" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." msgstr "" +"Por favor corrija los siguientes errores en la información ingresada donde " +"sea posible, luego vuelva a subir el archivo utilizando el formulario de la " +"parte superior." 
-#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" -msgstr "" +msgstr "Fila" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" -msgstr "" +msgstr "No específico del campo" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Vista previa" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nuevo" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Omitido" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Borrar" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Actualizar" -#~ msgid "Import finished" -#~ msgstr "Importación finalizada" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Este importador importará los siguientes campos:" + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Debe seleccionar un formato de exportación." 
diff --git a/import_export/locale/fa/LC_MESSAGES/django.mo b/import_export/locale/fa/LC_MESSAGES/django.mo index 7b7b7290e..71e6963fc 100644 Binary files a/import_export/locale/fa/LC_MESSAGES/django.mo and b/import_export/locale/fa/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/fa/LC_MESSAGES/django.po b/import_export/locale/fa/LC_MESSAGES/django.po index 5543b1787..d11809606 100644 --- a/import_export/locale/fa/LC_MESSAGES/django.po +++ b/import_export/locale/fa/LC_MESSAGES/django.po @@ -2,13 +2,14 @@ # This file is distributed under the same license as the django-import-export package. # # Yazdan Ranjbar , 2021. +# MohammadReza Sadegh Zadeh , 2024. msgid "" msgstr "" "Project-Id-Version: 0.0.1\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: 2021-03-09 00:29+0030\n" -"Last-Translator: Yazdan Ranjbar \n" +"Last-Translator: MohammadReza Sadegh Zadeh \n" "Language-Team: Persain/Farsi \n" "Language: Farsi/Persian\n" "MIME-Version: 0.1\n" @@ -17,123 +18,199 @@ msgstr "" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Generator: Poedit 1.5.4\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "بارگذاری" + +#: admin.py #, python-format msgid "%s through import_export" -msgstr "%s یه وسیله ورودی-خروجی" +msgstr "%s با استفاده از ورودی-خروجی" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." -msgstr "بارگذاری تمام شد، با {} مورد جدید و {} مورد به روز شده." - -#: admin.py:262 -#, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

فایل بارگذاری شده encode اشتباهی دارد: %s

" +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." +msgstr "" +"بارگذاری تمام شد، با {} مورد جدید، {} مورد به روز شده، {} مورد حذف شده و {} " +"مورد در شده." -#: admin.py:264 +#: admin.py #, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

در هنگام خواندن فایل %s با %s مواجه شد

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" +"در حال خواندن فایل، یک خطا رخ داده است. لطفا از فرمت مناسب برای فایل استفاده " +"کنید. %(exc_name)s" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "بارگذاری" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "خروجی" -#: admin.py:490 -msgid "You must select an export format." -msgstr "شما باید یک فرمت خروجی انتخاب کنید" - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "خروجی %(verbose_name_plural)s انتخاب شده" -#: forms.py:10 -msgid "File to import" -msgstr "قایل برای بارگذاری" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "منبع" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "فرمت" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "فایل برای بارگذاری" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "فرم معتبر نیست، ابتدا `is_valid` را فراخوانی کنید" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "حداقل یک فیلد را برای \"%(resource_name)s\" برای خروجی انتخاب کنید" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" +"فیلد‌های 
زیر در 'import_id_fields' اعلام شده اند، اما در فیلد‌های منبع وجود " +"ندارند: %s" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" +"فیلد‌های زیر در 'import_id_fields' اعلام شده اند، اما در فیلد‌های منبع وجود " +"ندارند: %s" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "فراخوانی به `force_str()` بر روی مورد نمونه با خطا رخ داده است: %s" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "خانه" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "خروجی %(len)s مورد انتخاب شده." +msgstr[1] "خروجی %(len)s مورد انتخاب شده." + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "این خروجی شامل این فیلد‌ها هست:" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "ارسال" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. 
If you are satisfied with the " "results, click 'Confirm import'" msgstr "" "پایین یک پیش‌نمایش از دیتا‌هایی است که بارگذاری خواهند شد اگر این موارد درست " -"هستروی 'تایید بارگذاری' گلیگ گنید" +"هستند، روی 'تایید بارگذاری' کلیک کنید" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "تایید بارگذاری" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "این بارگذاری شامل این فیلد‌ها هست:" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "خطاها" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "شماره خط" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" -msgstr "برخی از سطر‌ها معتبر نبودند" +msgstr "برخی سطر‌ها معتبر نبودند" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "لطفا این خطا را تصحیح کنید و سپس مجدد فایل را بارگذاری کنید" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" -msgstr "سظر" +msgstr "سطر" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "فیلد‌های غیر اختصاصی" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" -msgstr "نمایش" +msgstr "پیش‌نمایش" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "جدید" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" -msgstr "در شد" +msgstr "رد شده" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "حذف" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "بروزرسانی" -#~ msgid "Import finished" -#~ msgstr "بارگذاری به اتمام رسید" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "این بارگذاری شامل این فیلد‌ها هست:" + +#: widgets.py +msgid "Value could not be parsed." +msgstr "مقدار قابل تجزیه نبود." + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "Value could not be parsed using defined date formats." +#~ msgstr "مقدار قابل تجزیه نبود با فرمت‌های تاریخ تعریف شده." + +#~ msgid "Value could not be parsed using defined datetime formats." +#~ msgstr "مقدار قابل تجزیه نبود با فرمت‌های تاریخ و زمان تعریف شده." + +#~ msgid "Value could not be parsed using defined time formats." +#~ msgstr "مقدار قابل تجزیه نبود با فرمت‌های زمان تعریف شده." + +#~ msgid "You must select an export format." 
+#~ msgstr "شما باید یک فرمت خروجی انتخاب کنید" diff --git a/import_export/locale/fi/LC_MESSAGES/django.mo b/import_export/locale/fi/LC_MESSAGES/django.mo new file mode 100644 index 000000000..3db8c4888 Binary files /dev/null and b/import_export/locale/fi/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/fi/LC_MESSAGES/django.po b/import_export/locale/fi/LC_MESSAGES/django.po new file mode 100644 index 000000000..7c6147671 --- /dev/null +++ b/import_export/locale/fi/LC_MESSAGES/django.po @@ -0,0 +1,195 @@ +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" +"PO-Revision-Date: 2023-05-10 15:23+0300\n" +"Last-Translator: Lauri Virtanen \n" +"Language-Team: \n" +"Language: fi\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Tuo" + +#: admin.py +#, python-format +msgid "%s through import_export" +msgstr "%s käyttäen import_export" + +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." +msgstr "Tuonti valmis. Lisätty {} ja päivitetty {} kohteita {}." + +#: admin.py +#, python-format +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" +"Kohdattiin %(exc_name)s tiedostoa lukiessa. Varmista, että olet valinnut " +"oikean tiedostotyypin." + +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." 
+msgstr "" + +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html +msgid "Export" +msgstr "Vie" + +#: admin.py +#, python-format +msgid "Export selected %(verbose_name_plural)s" +msgstr "Vie valitut %(verbose_name_plural)s" + +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "Resurssi" + +#: forms.py +msgid "Format" +msgstr "Tiedostotyyppi" + +#: forms.py +msgid "File to import" +msgstr "Tuotava tiedosto" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html +msgid "Home" +msgstr "Etusivu" + +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +msgid "This exporter will export the following fields: " +msgstr "Tämä vienti vie seuraavat kentät: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html +msgid "Submit" +msgstr "Lähetä" + +#: templates/admin/import_export/import.html +msgid "" +"Below is a preview of data to be imported. 
If you are satisfied with the " +"results, click 'Confirm import'" +msgstr "" +"Alla on esikatselu tuotavista tiedoista. Jos olet tyytyväinen, paina " +"'Vahvista tuonti'." + +#: templates/admin/import_export/import.html +msgid "Confirm import" +msgstr "Vahvista tuonti" + +#: templates/admin/import_export/import.html +msgid "Errors" +msgstr "Virheet" + +#: templates/admin/import_export/import.html +msgid "Line number" +msgstr "Rivinumero" + +#: templates/admin/import_export/import.html +msgid "Some rows failed to validate" +msgstr "Joitakin rivejä ei voitu vahvistaa" + +#: templates/admin/import_export/import.html +msgid "" +"Please correct these errors in your data where possible, then reupload it " +"using the form above." +msgstr "" +"Korjaa nämä virheet tiedoissasi ja lähetä uudelleen käyttäen yllä olevaa " +"lomaketta." + +#: templates/admin/import_export/import.html +msgid "Row" +msgstr "Rivi" + +#: templates/admin/import_export/import.html +msgid "Non field specific" +msgstr "Ei liity mihinkään kenttään" + +#: templates/admin/import_export/import.html +msgid "Preview" +msgstr "Esikatselu" + +#: templates/admin/import_export/import.html +msgid "New" +msgstr "Uusi" + +#: templates/admin/import_export/import.html +msgid "Skipped" +msgstr "Ohitettu" + +#: templates/admin/import_export/import.html +msgid "Delete" +msgstr "Poisto" + +#: templates/admin/import_export/import.html +msgid "Update" +msgstr "Päivitys" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Tämä tuonti tuo seuraavat kentät: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Sinun täytyy valita tiedostotyyppi." 
diff --git a/import_export/locale/fr/LC_MESSAGES/django.mo b/import_export/locale/fr/LC_MESSAGES/django.mo index 06925c0a9..00383aef5 100644 Binary files a/import_export/locale/fr/LC_MESSAGES/django.mo and b/import_export/locale/fr/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/fr/LC_MESSAGES/django.po b/import_export/locale/fr/LC_MESSAGES/django.po index 27d16e1e2..dc4e4bf24 100644 --- a/import_export/locale/fr/LC_MESSAGES/django.po +++ b/import_export/locale/fr/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -18,120 +18,191 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n > 1);\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importer" + +#: admin.py #, python-format msgid "%s through import_export" -msgstr "" +msgstr "%s via import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "" +"Importation terminée: {} nouveaux, {} modifiés, {} supprimés et {} sautés " +"pour les {}." -#: admin.py:262 -#, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

Le fichier importé a un encodage erroné: %s

" - -#: admin.py:264 +#: admin.py #, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s rencontré en essayant de lire le fichier: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" +"Erreur %(exc_name)s pendant la lecture du fichier. Assurez-vous d’avoir " +"choisi le bon format pour le fichier." -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importer" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" +"Pas de données valides importables. Assurez-vous que le fichier contient des " +"en-têtes ou données correctes pour l’importation." -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Exporter" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Vous devez sélectionner un format d'exportation." 
- -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" -msgstr "Exporter %(verbose_name_plural)s selectionnés" +msgstr "Exporter %(verbose_name_plural)s selectionné(e)s" -#: forms.py:10 -msgid "File to import" -msgstr "Fichier à importer" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "exportation échouée à cause de IllegalCharacterError" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py +msgid "Resource" +msgstr "Ressource" + +#: forms.py msgid "Format" msgstr "Format" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Fichier à importer" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "Le formulaire n’est pas validé, appeler d’abord `is_valid`" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "Sélectionner au moins 1 champ pour \"%(resource_name)s\" pour exporter" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" +"Les champs suivants sont déclarés dans 'import_id_fields' mais ne sont pas présents " +"dans les champs de la ressource: %s" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" +"Les champs suivants sont déclarés dans 'import_id_fields' mais ne sont pas présents " +"dans les en-têtes du fichier: %s" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "un appel à force_str() sur une instance a échoué: %s" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Accueil" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected 
item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "Exporter %(len)s élément sélectionné." +msgstr[1] "Exporter %(len)s éléments sélectionnés." + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +msgid "This exporter will export the following fields: " +msgstr "Cet exportateur va exporter les champs suivants: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Soumettre" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" msgstr "" -"Voici un aperçu des données à importer. Si vous êtes satisfait des " -"résultats, cliquez sur 'Confirmer l'importation'" +"Voici un aperçu des données à importer. Si vous êtes satisfait(e) des " +"résultats, cliquez sur 'Confirmer l’importation'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" -msgstr "Confirmer l'importation" +msgstr "Confirmer l’importation" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Cet importateur va importer les champs suivants: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Erreurs" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Numéro de ligne" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" -msgstr "" +msgstr "Certaines lignes ont échoué à la validation" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data 
where possible, then reupload it " "using the form above." msgstr "" +"Veuillez corriger ces erreurs dans les données si possible, puis envoyer à " +"nouveau en utilisant le formulaire ci-dessus." -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" -msgstr "" +msgstr "Ligne" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" -msgstr "" +msgstr "Non spécifique à un champ" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Aperçu" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nouveau" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Ignoré" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Supprimer" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Mettre à jour" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Cet importateur va importer les champs suivants: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "La valeur n’a pas pu être interprétée." + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" +"use_natural_foreign_keys et key_is_id ne peuvent pas être True en même temps" + +#~ msgid "You must select an export format." +#~ msgstr "Vous devez sélectionner un format d'exportation." 
diff --git a/import_export/locale/it/LC_MESSAGES/django.mo b/import_export/locale/it/LC_MESSAGES/django.mo index 99e2326fb..cbd26022f 100644 Binary files a/import_export/locale/it/LC_MESSAGES/django.mo and b/import_export/locale/it/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/it/LC_MESSAGES/django.po b/import_export/locale/it/LC_MESSAGES/django.po index 8aed4b5c7..4342b4acd 100644 --- a/import_export/locale/it/LC_MESSAGES/django.po +++ b/import_export/locale/it/LC_MESSAGES/django.po @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: \n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: 2015-08-30 20:32+0100\n" "Last-Translator: Christian Galeffi \n" "Language-Team: Italian \n" @@ -17,62 +17,112 @@ msgstr "" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Generator: Poedit 1.5.4\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importare" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." msgstr "" -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importare" - -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Esportare" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Devi selezionare un formato di esportazione." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Esporta selezionati %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "File da importare" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Formato" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "File da importare" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Home" -#: 
templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Verranno importati i seguenti campi:" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Inviare" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -80,60 +130,67 @@ msgstr "" "Questa è un'anteprima dei dati che saranno importati. Se il risultato è " "soddisfacente, premi 'Conferma importazione'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Conferma importazione" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Verranno importati i seguenti campi:" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Errori" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Numero linea" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the 
form above." msgstr "" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Anteprima" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nuovo" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Salta" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Cancella" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Aggiorna" -#~ msgid "Import finished" -#~ msgstr "Importazione terminata" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Verranno importati i seguenti campi:" + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Devi selezionare un formato di esportazione." 
diff --git a/import_export/locale/ja/LC_MESSAGES/django.mo b/import_export/locale/ja/LC_MESSAGES/django.mo index 23a149701..d54735c0b 100644 Binary files a/import_export/locale/ja/LC_MESSAGES/django.mo and b/import_export/locale/ja/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/ja/LC_MESSAGES/django.po b/import_export/locale/ja/LC_MESSAGES/django.po index ee40f626e..4387e69d0 100644 --- a/import_export/locale/ja/LC_MESSAGES/django.po +++ b/import_export/locale/ja/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -18,62 +18,111 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=1; plural=0;\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "インポート" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." msgstr "" -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "インポート" - -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "エクスポート" -#: admin.py:490 -msgid "You must select an export format." -msgstr "エクスポートフォーマットを選択してください。" - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "選択した %(verbose_name_plural)s をエクスポート" -#: forms.py:10 -msgid "File to import" -msgstr "インポートするファイル" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py +msgid "Resource" +msgstr "" + +#: forms.py msgid "Format" msgstr "フォーマット" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "インポートするファイル" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "ホーム" -#: templates/admin/import_export/export.html:31 -#: 
templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "以下の列をインポートします。" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "確定" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,60 +130,67 @@ msgstr "" "インポートされるデータのプレビューを表示しています。この内容で問題なければ" "「インポート実行」をクリックしてください。" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "インポート実行" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "以下の列をインポートします。" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "エラー" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "行番号" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "プレビュー" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "新規" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "スキップ" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "削除" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "更新" -#~ msgid "Import finished" -#~ msgstr "インポートが完了しました。" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "以下の列をインポートします。" + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." 
+#~ msgstr "エクスポートフォーマットを選択してください。" diff --git a/import_export/locale/ko/LC_MESSAGES/django.mo b/import_export/locale/ko/LC_MESSAGES/django.mo index 626808250..b51e1d946 100644 Binary files a/import_export/locale/ko/LC_MESSAGES/django.mo and b/import_export/locale/ko/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/ko/LC_MESSAGES/django.po b/import_export/locale/ko/LC_MESSAGES/django.po index 1a9be059c..b7321ed82 100644 --- a/import_export/locale/ko/LC_MESSAGES/django.po +++ b/import_export/locale/ko/LC_MESSAGES/django.po @@ -8,9 +8,9 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: Jinmyeong Cho \n" +"Last-Translator: Yeongkwang Yang \n" "Language-Team: LANGUAGE \n" "Language: \n" "MIME-Version: 1.0\n" @@ -18,62 +18,113 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=1; plural=0;\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "가져오기" + +#: admin.py #, python-format msgid "%s through import_export" -msgstr "" +msgstr "%s은(는) django-import-export를 통해 가져왔습니다." -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." -msgstr "" +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." +msgstr "가져오기 성공, {} 행 추가, {} 행 업데이트" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." msgstr "" -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "가져오기" - -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "내보내기" -#: admin.py:490 -msgid "You must select an export format." -msgstr "내보낼 형식을 선택해주세요." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" +msgstr "선택한 %(verbose_name_plural)s 내보내기" + +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" msgstr "" -#: forms.py:10 -msgid "File to import" -msgstr "파일" +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "형식" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "파일" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: 
templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "다음의 필드를 가져옵니다: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "제출" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,57 +132,67 @@ msgstr "" "다음은 불러올 데이터의 미리보기 입니다.데이터에 문제가 없다면 확인을 눌러 가" "져오기를 진행하세요." -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "확인" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "다음의 필드를 가져옵니다: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "에러" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "행 번호" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "유효성 검증에 실패한 행이 있습니다." -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." msgstr "에러를 수정한 후 파일을 다시 업로드 해주세요." 
-#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" -msgstr "" +msgstr "지정된 필드 없음" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "미리보기" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "생성" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "넘어감" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "삭제" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "갱신" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "다음의 필드를 가져옵니다: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "내보낼 형식을 선택해주세요." 
diff --git a/import_export/locale/kz/LC_MESSAGES/django.mo b/import_export/locale/kz/LC_MESSAGES/django.mo index 224f339cb..bcfd011aa 100644 Binary files a/import_export/locale/kz/LC_MESSAGES/django.mo and b/import_export/locale/kz/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/kz/LC_MESSAGES/django.po b/import_export/locale/kz/LC_MESSAGES/django.po index 263fc5745..38d2bd4ce 100644 --- a/import_export/locale/kz/LC_MESSAGES/django.po +++ b/import_export/locale/kz/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: Muslim Beibytuly \n" "Language-Team: LANGUAGE \n" @@ -17,62 +17,114 @@ msgstr "" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Импорт" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s арқылы import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "Импорт аяқталды, {} жаңа және {} жаңартылды {}." -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

Импортталған файлда қате кодтау бар: %s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s файлды оқып жатқанда кездесті: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Импорт" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Экспорт" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Сіз экспорт форматын таңдауыңыз керек." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Таңдалған %(verbose_name_plural)s экспорттаңыз" -#: forms.py:10 -msgid "File to import" -msgstr "Импорттауға арналған файл" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Формат" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Импорттауға арналған файл" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid 
"call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Басты бет" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Бұл импорттаушы келесі өрістерді импорттайды: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Жіберу" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -80,28 +132,23 @@ msgstr "" "Төменде импортталатын деректерді алдын ала қарау берілген. Егер сіз " "нәтижелерге қанағаттансаңыз, 'Импортты растау' түймесін басыңыз." 
-#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Импортты растау" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Бұл импорттаушы келесі өрістерді импорттайды: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Қателер" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Жол нөмірі" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "Кейбір жолдар тексерілмеді" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." @@ -109,30 +156,45 @@ msgstr "" "Мүмкіндігінше деректеріңіздегі қателерді түзетіңіз, содан кейін жоғарыдағы " "пішінді қолданып қайта жүктеңіз." 
-#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "Қатар" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "Өріске қатысты емес" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Алдын-ала қарау" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Жаңа" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Өткізілді" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Жою" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Жаңарту" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Бұл импорттаушы келесі өрістерді импорттайды: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Сіз экспорт форматын таңдауыңыз керек." 
diff --git a/import_export/locale/nl/LC_MESSAGES/django.mo b/import_export/locale/nl/LC_MESSAGES/django.mo index a129a088e..23306e90f 100644 Binary files a/import_export/locale/nl/LC_MESSAGES/django.mo and b/import_export/locale/nl/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/nl/LC_MESSAGES/django.po b/import_export/locale/nl/LC_MESSAGES/django.po index ef0e9e198..8553a6f91 100644 --- a/import_export/locale/nl/LC_MESSAGES/django.po +++ b/import_export/locale/nl/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -18,62 +18,114 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importeren" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s door import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "Import is klaar met {} nieuwe en {} geupdate {}." -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

Het geimporteerde bestand heeft de verkeerde encoding: %s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s tegengekomen tijden het lezen van het bestand: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importeren" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Exporteren" -#: admin.py:490 -msgid "You must select an export format." -msgstr "U moet een export formaat kiezen." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Exporteer geselecteerde %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "Bestand om te importeren" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Formaat" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Bestand om te importeren" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid 
"call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Terug" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Deze import zal de volgende velden toevoegen" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Indienen" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,28 +133,23 @@ msgstr "" "Hieronder is een voorvertoning van de data die geïmporteerd zal worden. Als " "u tevreden bent met het resultaat, klik dan op 'Accepteer de import'." 
-#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Accepteer de import" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Deze import zal de volgende velden toevoegen" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Fouten" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Regel nummer" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "Sommige regels zijn niet goedgekeurd" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." @@ -110,30 +157,45 @@ msgstr "" "Verander alstublieft de volgende fouten in uw data waar mogelijk. Upload het " "bestand daarna nogmaals met het veld hierboven." 
-#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "Regel" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "Niet veld specifiek" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Voorbeeldweergave" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nieuw" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Overgeslagen" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Verwijderen" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Bijwerken" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Deze import zal de volgende velden toevoegen" + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "U moet een export formaat kiezen." 
diff --git a/import_export/locale/pl/LC_MESSAGES/django.mo b/import_export/locale/pl/LC_MESSAGES/django.mo index 92fb563bc..f134056a1 100644 Binary files a/import_export/locale/pl/LC_MESSAGES/django.mo and b/import_export/locale/pl/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/pl/LC_MESSAGES/django.po b/import_export/locale/pl/LC_MESSAGES/django.po index 2b57f6774..326b6af3d 100644 --- a/import_export/locale/pl/LC_MESSAGES/django.po +++ b/import_export/locale/pl/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -19,62 +19,115 @@ msgstr "" "Plural-Forms: nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 " "|| n%100>=20) ? 1 : 2);\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Import" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s przez import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "Import zakończony, z {} nowymi i {} zaktualizowanymi {}." -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

Zaimportowany plik ma złe kodowanie: %s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s napotkany podczas próby czytania pliku: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Import" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Eksport" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Musisz wybrać format eksportu." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Eksportuj wybrane %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "Plik do importu" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py +msgid "Resource" +msgstr "" + +#: forms.py msgid "Format" msgstr "Format" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Plik do importu" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance 
failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Powrót" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" +msgstr[2] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Zostaną zaimportowane następujące pola: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Wyślij" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -82,60 +135,67 @@ msgstr "" "Poniżej znajdują się przykładowe dane do zaimportowania. 
Jeśli " "satysfakcjonuje Cię wynik, kliknij 'Potwierdź import'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Potwierdź import" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Zostaną zaimportowane następujące pola: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Błędy" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Numer linii" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Podgląd" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nowy" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Pominięty" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Usuń" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Zaktualizowany" -#~ msgid "Import finished" -#~ msgstr "Zakończono importowanie" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Zostaną zaimportowane następujące pola: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Musisz wybrać format eksportu." 
diff --git a/import_export/locale/pt_BR/LC_MESSAGES/django.mo b/import_export/locale/pt_BR/LC_MESSAGES/django.mo index ef56d624c..df54feaa8 100644 Binary files a/import_export/locale/pt_BR/LC_MESSAGES/django.mo and b/import_export/locale/pt_BR/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/pt_BR/LC_MESSAGES/django.po b/import_export/locale/pt_BR/LC_MESSAGES/django.po index a84acb35c..ccf5f88dd 100644 --- a/import_export/locale/pt_BR/LC_MESSAGES/django.po +++ b/import_export/locale/pt_BR/LC_MESSAGES/django.po @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: \n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: 2020-06-06 10:30-0500\n" "Last-Translator: Daniel Pluth \n" "Language-Team: \n" @@ -17,62 +17,114 @@ msgstr "" "Plural-Forms: nplurals=2; plural=(n > 1);\n" "X-Generator: Lokalize 20.04.1\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importar" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s através import_export " -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "A importação foi completada com {} novas e {} atualizadas {}" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

O arquivo importado tem uma codificação errada: %s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s encontrado durante a leitura do arquivo: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importar" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Exportar" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Você tem que selecionar um formato de exportação." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Exportar %(verbose_name_plural)s selecionados" -#: forms.py:10 -msgid "File to import" -msgstr "Arquivo a ser importado" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Formato" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Arquivo a ser importado" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format 
+msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Início" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Este importador vai importar os seguintes campos:" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Enviar" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -80,28 +132,23 @@ msgstr "" "Ver abaixo uma prévia dos dados a serem importados. 
Se você esta satisfeito " "com os resultados, clique em 'Confirmar importação'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Confirmar importação" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Este importador vai importar os seguintes campos:" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Erros" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Número da linha" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "Algumas linhas não foram validadas" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." @@ -109,33 +156,45 @@ msgstr "" "Por favor corrigir os erros nos dados onde possível e recarregar os dados " "com o formato acima." 
-#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "Linha" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "Campo não é específico" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Prévia" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Novo" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Não usados" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Remover" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Atualizar" -#~ msgid "Import finished" -#~ msgstr "Importação finalizada" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Este importador vai importar os seguintes campos:" + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Você tem que selecionar um formato de exportação." 
diff --git a/import_export/locale/ru/LC_MESSAGES/django.mo b/import_export/locale/ru/LC_MESSAGES/django.mo index 67ddb3d70..f3967c9ad 100644 Binary files a/import_export/locale/ru/LC_MESSAGES/django.mo and b/import_export/locale/ru/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/ru/LC_MESSAGES/django.po b/import_export/locale/ru/LC_MESSAGES/django.po index f80a73d08..2833e8917 100644 --- a/import_export/locale/ru/LC_MESSAGES/django.po +++ b/import_export/locale/ru/LC_MESSAGES/django.po @@ -3,107 +3,159 @@ # This file is distributed under the same license as the PACKAGE package. # FIRST AUTHOR , YEAR. # -#, fuzzy msgid "" msgstr "" -"Project-Id-Version: PACKAGE VERSION\n" +"Project-Id-Version: \n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"Language: \n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" +"PO-Revision-Date: 2024-04-26 20:55+0700\n" +"Last-Translator: \n" +"Language-Team: \n" +"Language: ru\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" -"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" -"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" +"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && " +"n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" +"X-Generator: Poedit 3.0.1\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Импорт" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s через import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." -msgstr "Импорт завершен, {} новых и {} обновлено." +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." 
+msgstr "Импорт завершен: {} новых, {} обновлено, {} удалено и {} пропущено {}." -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

Импортированный файл имеет неправильную кодировку: %s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s при попытке прочитать файл: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" +"При чтении файла возникла ошибка %(exc_name)s. Убедитесь, что используется " +"подходящий формат файла." -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Импорт" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" +"Некорректные данные для импорта. Убедитесь, что файл содержит корректные " +"заголовок и данные." -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Экспорт" -#: admin.py:490 -msgid "You must select an export format." 
-msgstr "Необходимо выбрать формат экспорта" - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Экспортировать выбранные %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "Файл для импорта" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py +msgid "Resource" +msgstr "Ресурс" + +#: forms.py msgid "Format" msgstr "Формат" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Файл для импорта" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "Необходимо сначала вызвать `is_valid` для валидации формы" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "Выберите хотя бы одно поле для экспорта \"%(resource_name)s\"" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" +"Следующие поля указаны в 'import_id_fields', но отсутствуют в полях ресурса: " +"%s" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" +"Следующие поля указаны в 'import_id_fields', но отсутствуют в заголовке " +"файла: %s" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "вызов 'force_str()' завершился ошибкой: %s" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Главная" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "Экспортировать %(len)s выбранный элемент." +msgstr[1] "Экспортировать %(len)s выбранных элемента." 
+msgstr[2] "Экспортировать %(len)s выбранных элементов." + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +msgid "This exporter will export the following fields: " +msgstr "Будут экспортированы следующие поля: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Отправить" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" msgstr "" -"Ниже показано то, что будет импортировано. Нажмите 'Подтвердить импорт',если " -"Вас устраивает результат" +"Ниже показано то, что будет импортировано. Нажмите 'Подтвердить импорт', " +"если Вас устраивает результат" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Подтвердить импорт" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Будут импортированы следующие поля: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Ошибки" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Номер строки" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "Некоторые строки не прошли валидацию" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." @@ -111,33 +163,54 @@ msgstr "" "По возможности исправьте эти ошибки в своих данных, а затем повторно " "загрузите их, используя форму выше." 
-#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "Строка" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "Не относящиеся к конкретному полю" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Предпросмотр" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Добавлено" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Пропущено" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Удалено" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Обновлено" -#~ msgid "Import finished" -#~ msgstr "Импорт завершен" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Будут импортированы следующие поля: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "Ошибка парсинга значения." + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "Value could not be parsed using defined date formats." +#~ msgstr "Ошибка парсинга значения даты." + +#~ msgid "Value could not be parsed using defined datetime formats." +#~ msgstr "Ошибка парсинга значения даты и времени." + +#~ msgid "Value could not be parsed using defined time formats." +#~ msgstr "Ошибка парсинга значения времени." + +#~ msgid "You must select an export format." 
+#~ msgstr "Необходимо выбрать формат экспорта" diff --git a/import_export/locale/sk/LC_MESSAGES/django.mo b/import_export/locale/sk/LC_MESSAGES/django.mo index 3da6edb65..0e2d18f62 100644 Binary files a/import_export/locale/sk/LC_MESSAGES/django.mo and b/import_export/locale/sk/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/sk/LC_MESSAGES/django.po b/import_export/locale/sk/LC_MESSAGES/django.po index 181c0b939..146734a98 100644 --- a/import_export/locale/sk/LC_MESSAGES/django.po +++ b/import_export/locale/sk/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -18,62 +18,113 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "Importovať" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." msgstr "" -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "Importovať" - -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Exportovať" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Je potrebné vybrať formát exportu." - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Exportovať vybrané %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "Importovať súbor" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Formát" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "Importovať súbor" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Domov" -#: 
templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" +msgstr[1] "" +msgstr[2] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Budú importované nasledujúce polia: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Odoslať" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -81,60 +132,67 @@ msgstr "" "Nižšie je zobrazený náhľad importovaných dát. Ak je všetko v poriadku, " "kliknite na tlačidlo 'Potvrdiť import'" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "Potvrdiť import" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Budú importované nasledujúce polia: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Chyby" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Číslo riadku" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the 
form above." msgstr "" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Náhľad" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Nový" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Preskočený" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Vymazaný" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Aktualizovaný" -#~ msgid "Import finished" -#~ msgstr "Import dokončený" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Budú importované nasledujúce polia: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." +#~ msgstr "Je potrebné vybrať formát exportu." 
diff --git a/import_export/locale/tr/LC_MESSAGES/django.mo b/import_export/locale/tr/LC_MESSAGES/django.mo index e62b0071a..14ddc99ab 100644 Binary files a/import_export/locale/tr/LC_MESSAGES/django.mo and b/import_export/locale/tr/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/tr/LC_MESSAGES/django.po b/import_export/locale/tr/LC_MESSAGES/django.po index ccb97e046..5d5d6f386 100644 --- a/import_export/locale/tr/LC_MESSAGES/django.po +++ b/import_export/locale/tr/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -18,63 +18,122 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n > 1);\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "İçe aktar" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s vasıtasıyla import_export" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." +#: admin.py +#, fuzzy +#| msgid "Import finished, with {} new and {} updated {}." +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." msgstr "{} yeni ve {} güncellenen {} ile içe aktarma bitti" -#: admin.py:262 -#, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

İçe aktarılan dosyada yanlış kodlama bulunmaktadır: %s

" - -#: admin.py:264 +#: admin.py #, python-format -msgid "

%s encountered while trying to read file: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." msgstr "" -"

Dosya okunurken %s hatası ile karşılaşıldı, okunan dosya adı: %s

" +"%(exc_name)s dosyayı okumaya çalışırken karşılaşıldı. Dosya için doğru " +"biçimi seçtiğinizden emin olun." -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "İçe aktar" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" +"Geçerli içe aktarılacak veri yok. Dosyanızın doğru başlıkları veya içe " +"aktarım için verileri olduğundan emin olun." -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "Dışa aktar" -#: admin.py:490 -msgid "You must select an export format." -msgstr "Bir dosya biçimi seçmelisiniz" - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "Seçililenleri dışa aktar %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "İçe alınacak dosya" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "dışa aktarma, IllegalCharacterError nedeniyle başarısız oldu" + +#: forms.py +msgid "Resource" +msgstr "Kaynak" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "Dosya biçimi" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "İçe alınacak dosya" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "Form doğrulanmadı, önce `is_valid` çağırın" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "\"%(resource_name)s\" için en az 1 alan seçin" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the 
resource fields: %s" +msgstr "" +"Aşağıdaki alanlar 'import_id_fields' içinde belirtilmiş ancak kaynak " +"alanlarında bulunmamaktadır: %s" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" +"Aşağıdaki alanlar 'import_id_fields' içinde belirtilmiş ancak dosya " +"başlıklarında bulunmamaktadır: %s" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "force_str() çağrısı örnekte başarısız oldu: %s" + +#: templates/admin/import_export/base.html msgid "Home" msgstr "Ana sayfa" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "Dışa aktar %(len)s seçilen öğe." +msgstr[1] "Dışa aktar %(len)s seçilen öğeler." + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +#, fuzzy +#| msgid "This importer will import the following fields: " +msgid "This exporter will export the following fields: " +msgstr "Bu içe aktarıcı aşağıdaki alanları içe aktaracaktır: " + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "Kaydet" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" @@ -82,28 +141,23 @@ msgstr "" "Aşağıda içe aktarılacak verilerin önizlemesi verilmiştir. Sonuçlardan " "memnunsanız 'İçe aktarmayı onayla'yı tıklayın." 
-#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "İçe aktarmayı onayla" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "Bu içe aktarıcı aşağıdaki alanları içe aktaracaktır: " - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "Hatalar" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "Satır numarası" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "Bazı satırlar doğrulanamadı" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." @@ -111,30 +165,45 @@ msgstr "" "Lütfen verilerinizdeki bu hataları olabildiğince düzeltin, sonra yukarıdaki " "formu kullanarak tekrar yükleyin." 
-#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "Satır" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "Alan olmayana özgü" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "Ön izleme" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "Yeni" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "Atlandı" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "Sil" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "Güncelle" + +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "Bu içe aktarıcı aşağıdaki alanları içe aktaracaktır: " + +#: widgets.py +msgid "Value could not be parsed." +msgstr "Değer ayrıştırılamadı." + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "use_natural_foreign_keys ve key_is_id aynı anda True olamaz" + +#~ msgid "You must select an export format." 
+#~ msgstr "Bir dosya biçimi seçmelisiniz" diff --git a/import_export/locale/zh_Hans/LC_MESSAGES/django.mo b/import_export/locale/zh_Hans/LC_MESSAGES/django.mo index 9c9a3cea4..316502ed8 100644 Binary files a/import_export/locale/zh_Hans/LC_MESSAGES/django.mo and b/import_export/locale/zh_Hans/LC_MESSAGES/django.mo differ diff --git a/import_export/locale/zh_Hans/LC_MESSAGES/django.po b/import_export/locale/zh_Hans/LC_MESSAGES/django.po index 3cbce25d4..25fe87417 100644 --- a/import_export/locale/zh_Hans/LC_MESSAGES/django.po +++ b/import_export/locale/zh_Hans/LC_MESSAGES/django.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-10-18 20:53+0100\n" +"POT-Creation-Date: 2025-02-24 11:40-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: hao wang <173300430@qq.com>\n" "Language-Team: LANGUAGE \n" @@ -18,121 +18,175 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=1; plural=0;\n" -#: admin.py:158 +#: admin.py templates/admin/import_export/change_list_import_item.html +#: templates/admin/import_export/import.html +msgid "Import" +msgstr "导入" + +#: admin.py #, python-format msgid "%s through import_export" msgstr "%s 通过 django-import-export导入" -#: admin.py:164 -msgid "Import finished, with {} new and {} updated {}." -msgstr "导入成功,新增{}条记录,更新{}条记录。" +#: admin.py +msgid "Import finished: {} new, {} updated, {} deleted and {} skipped {}." +msgstr "导入成功,新增{}条记录,更新{}条记录,删除{}条记录,忽略{}条记录。" -#: admin.py:262 +#: admin.py #, python-format -msgid "

Imported file has a wrong encoding: %s

" -msgstr "

导入的文件编码有误:%s

" - -#: admin.py:264 -#, python-format -msgid "

%s encountered while trying to read file: %s

" -msgstr "

%s 读取文件时遇到了冲突: %s

" +msgid "" +"%(exc_name)s encountered while trying to read file. Ensure you have chosen " +"the correct format for the file." +msgstr "" -#: admin.py:295 templates/admin/import_export/change_list_import_item.html:5 -#: templates/admin/import_export/import.html:10 -msgid "Import" -msgstr "导入" +#: admin.py +msgid "" +"No valid data to import. Ensure your file has the correct headers or data " +"for import." +msgstr "" -#: admin.py:429 templates/admin/import_export/change_list_export_item.html:5 -#: templates/admin/import_export/export.html:7 +#: admin.py templates/admin/import_export/change_form.html +#: templates/admin/import_export/change_list_export_item.html +#: templates/admin/import_export/export.html msgid "Export" msgstr "导出" -#: admin.py:490 -msgid "You must select an export format." -msgstr "您必须选择一个导出格式。" - -#: admin.py:513 +#: admin.py #, python-format msgid "Export selected %(verbose_name_plural)s" msgstr "导出选中的 %(verbose_name_plural)s" -#: forms.py:10 -msgid "File to import" -msgstr "导入文件" +#: formats/base_formats.py +msgid "export failed due to IllegalCharacterError" +msgstr "" + +#: forms.py +msgid "Resource" +msgstr "" -#: forms.py:13 forms.py:41 forms.py:66 +#: forms.py msgid "Format" msgstr "格式" -#: templates/admin/import_export/base.html:11 +#: forms.py +msgid "File to import" +msgstr "导入文件" + +#: forms.py +msgid "Form is not validated, call `is_valid` first" +msgstr "" + +#: forms.py +#, python-format +msgid "Select at least 1 field for \"%(resource_name)s\" to export" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the resource fields: %s" +msgstr "" + +#: resources.py +#, python-format +msgid "" +"The following fields are declared in 'import_id_fields' but are not present " +"in the file headers: %s" +msgstr "" + +#: results.py +#, python-format +msgid "call to force_str() on instance failed: %s" +msgstr "" + +#: 
templates/admin/import_export/base.html msgid "Home" msgstr "" -#: templates/admin/import_export/export.html:31 -#: templates/admin/import_export/import.html:52 +#: templates/admin/import_export/export.html +#, python-format +msgid "Export %(len)s selected item." +msgid_plural "Export %(len)s selected items." +msgstr[0] "" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/resource_fields_list.html +msgid "This exporter will export the following fields: " +msgstr "此次将导出以下字段:" + +#: templates/admin/import_export/export.html +#: templates/admin/import_export/import.html msgid "Submit" msgstr "提交" -#: templates/admin/import_export/import.html:20 +#: templates/admin/import_export/import.html msgid "" "Below is a preview of data to be imported. If you are satisfied with the " "results, click 'Confirm import'" msgstr "以下是导入数据的预览。如果确认结果没有问题,可以点击 “确认导入”" -#: templates/admin/import_export/import.html:23 +#: templates/admin/import_export/import.html msgid "Confirm import" msgstr "确认导入" -#: templates/admin/import_export/import.html:31 -msgid "This importer will import the following fields: " -msgstr "此次将导入以下字段:" - -#: templates/admin/import_export/import.html:61 -#: templates/admin/import_export/import.html:90 +#: templates/admin/import_export/import.html msgid "Errors" msgstr "错误" -#: templates/admin/import_export/import.html:72 +#: templates/admin/import_export/import.html msgid "Line number" msgstr "行号" -#: templates/admin/import_export/import.html:82 +#: templates/admin/import_export/import.html msgid "Some rows failed to validate" msgstr "某些行验数据证失败" -#: templates/admin/import_export/import.html:84 +#: templates/admin/import_export/import.html msgid "" "Please correct these errors in your data where possible, then reupload it " "using the form above." 
msgstr "请使用上面的表单,纠正这些提示有错误的数据,并重新上传" -#: templates/admin/import_export/import.html:89 +#: templates/admin/import_export/import.html msgid "Row" msgstr "行" -#: templates/admin/import_export/import.html:116 +#: templates/admin/import_export/import.html msgid "Non field specific" msgstr "没有指定的字段" -#: templates/admin/import_export/import.html:137 +#: templates/admin/import_export/import.html msgid "Preview" msgstr "预览" -#: templates/admin/import_export/import.html:152 +#: templates/admin/import_export/import.html msgid "New" msgstr "新增" -#: templates/admin/import_export/import.html:154 +#: templates/admin/import_export/import.html msgid "Skipped" msgstr "忽略" -#: templates/admin/import_export/import.html:156 +#: templates/admin/import_export/import.html msgid "Delete" msgstr "删除" -#: templates/admin/import_export/import.html:158 +#: templates/admin/import_export/import.html msgid "Update" msgstr "更新" -#~ msgid "Import finished" -#~ msgstr "导入完成" +#: templates/admin/import_export/resource_fields_list.html +msgid "This importer will import the following fields: " +msgstr "此次将导入以下字段:" + +#: widgets.py +msgid "Value could not be parsed." +msgstr "" + +#: widgets.py +msgid "use_natural_foreign_keys and key_is_id cannot both be True" +msgstr "" + +#~ msgid "You must select an export format." 
+#~ msgstr "您必须选择一个导出格式。" diff --git a/import_export/management/__init__.py b/import_export/management/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import_export/management/commands/__init__.py b/import_export/management/commands/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import_export/management/commands/export.py b/import_export/management/commands/export.py new file mode 100644 index 000000000..5540bad25 --- /dev/null +++ b/import_export/management/commands/export.py @@ -0,0 +1,60 @@ +import sys + +from django.core.management.base import BaseCommand + +from import_export.command_utils import ( + get_default_format_names, + get_format_class, + get_resource_class, +) + + +class Command(BaseCommand): + help = "Export data from a specified resource or model in a chosen format." + + def add_arguments(self, parser): + default_format_names = get_default_format_names() + parser.add_argument( + "format", + help=f"""Specify the export format. Can be one of the default formats + ({default_format_names}), or a custom format class provided as a dotted path + (e.g., 'XLSX' or 'mymodule.CustomCSV').""", + ) + parser.add_argument( + "resource", + help="""Specify the resource or model to export. Accepts a resource class or + a model class in dotted path format "(e.g., 'mymodule.resources.MyResource' + or 'auth.User').""", + ) + parser.add_argument( + "--encoding", + help="Specify the encoding to use for the exported data (e.g., 'utf-8'). 
" + "This applies to text-based formats.", + ) + + def handle(self, *args, **options): + model_or_resource_class = options.get("resource") + format_name = options.get("format") + encoding = options.get("encoding") + + resource = get_resource_class(model_or_resource_class)() + format_class = get_format_class(format_name, None, encoding) + + data = resource.export() + export_data = format_class.export_data(data) + + if not format_class.is_binary(): + if encoding: + export_data = export_data.encode(encoding) + else: + export_data = export_data.encode() + + if format_class.is_binary() and self.stdout.isatty(): + self.stderr.write( + self.style.ERROR( + "This is a binary format and your terminal does not support " + "binary data. Redirect the output to a file." + ) + ) + sys.exit(1) + self.stdout.buffer.write(export_data) diff --git a/import_export/management/commands/import.py b/import_export/management/commands/import.py new file mode 100644 index 000000000..2ea5f0e84 --- /dev/null +++ b/import_export/management/commands/import.py @@ -0,0 +1,116 @@ +import sys + +from django.core.management.base import BaseCommand, CommandError + +from import_export.command_utils import ( + get_default_format_names, + get_format_class, + get_resource_class, +) +from import_export.results import RowResult + + +class Command(BaseCommand): + help = "Import data from various formats into a specified Django model." 
+ + def add_arguments(self, parser): + parser.add_argument( + "resource", + help="""The resource class or model class specified as a dotted path, + e.g., mymodule.resources.MyResource or auth.User.""", + ) + parser.add_argument( + "import_file_name", + help="""The file to import from (use "-" for stdin).""", + ) + parser.add_argument( + "--noinput", + "--no-input", + action="store_false", + dest="interactive", + help="Do NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "--raise-errors", + action="store_true", + help="Raise errors if encountered during execution.", + ) + parser.add_argument( + "-n", + "--dry-run", + action="store_true", + help="Perform a dry run without making any changes.", + ) + default_format_names = get_default_format_names() + parser.add_argument( + "--format", + help=f"""The data format. If not provided, it will be guessed from the + mimetype. You can use a format from DEFAULT_FORMATS ({default_format_names}) + or specify a custom format class using a dotted path + (e.g., XLSX or mymodule.CustomCSV).""", + ) + parser.add_argument( + "--encoding", + help="The character encoding of the data.", + ) + + def handle(self, *args, **options): + interactive = options["interactive"] + dry_run = options.get("dry_run") + raise_errors = options.get("raise_errors") + file_name = options.get("import_file_name") + model_or_resource_class = options.get("resource") + format_name = options.get("format") + encoding = options.get("encoding") + + if interactive: + message = "Are you sure you want to import the data? 
[yes/no]: " + if input(message) != "yes": + raise CommandError("Import cancelled.") + + resource = get_resource_class(model_or_resource_class)() + format_class = get_format_class(format_name, file_name, encoding) + if file_name == "-": + if format_class.is_binary(): + data = sys.stdin.buffer.read() + else: + data = sys.stdin.read() + else: + with open(file_name, format_class.get_read_mode()) as file: + data = file.read() + + dataset = format_class.create_dataset(data) + + result = resource.import_data( + dataset, dry_run=dry_run, raise_errors=raise_errors + ) + + if dry_run: + self.stderr.write( + self.style.NOTICE( + "You have activated the --dry-run option" + " so no data will be modified." + ) + ) + + if result.has_errors(): + self.stderr.write(self.style.ERROR("Import errors!")) + for error in result.base_errors: + self.stderr.write(repr(error.error), self.style.ERROR) + for line, errors in result.row_errors(): + for error in errors: + self.stderr.write( + self.style.ERROR(f"Line number: {line} - {repr(error.error)}") + ) + sys.exit(1) + else: + success_message = ( + "Import finished: {} new, {} updated, {} deleted and {} skipped {}." 
+ ).format( + result.totals[RowResult.IMPORT_TYPE_NEW], + result.totals[RowResult.IMPORT_TYPE_UPDATE], + result.totals[RowResult.IMPORT_TYPE_DELETE], + result.totals[RowResult.IMPORT_TYPE_SKIP], + resource._meta.model._meta.verbose_name_plural, + ) + self.stderr.write(self.style.NOTICE(success_message)) diff --git a/import_export/mixins.py b/import_export/mixins.py index 5c2830ee7..1fc64b232 100644 --- a/import_export/mixins.py +++ b/import_export/mixins.py @@ -1,82 +1,271 @@ +import logging +import warnings +from warnings import warn + +from django.conf import settings from django.http import HttpResponse from django.utils.timezone import now from django.views.generic.edit import FormView from .formats import base_formats -from .forms import ExportForm +from .forms import SelectableFieldsExportForm from .resources import modelresource_factory from .signals import post_export +logger = logging.getLogger(__name__) + class BaseImportExportMixin: - formats = base_formats.DEFAULT_FORMATS + """ + Base mixin for functionality related to importing and exporting via the Admin + interface. + """ + resource_class = None + resource_classes = [] + + @property + def formats(self): + return getattr(settings, "IMPORT_EXPORT_FORMATS", base_formats.DEFAULT_FORMATS) + + @property + def export_formats(self): + return getattr(settings, "EXPORT_FORMATS", self.formats) + + @property + def import_formats(self): + return getattr(settings, "IMPORT_FORMATS", self.formats) - def get_resource_class(self): - if not self.resource_class: - return modelresource_factory(self.model) - return self.resource_class + def check_resource_classes(self, resource_classes): + if resource_classes and not hasattr(resource_classes, "__getitem__"): + raise Exception( + "The resource_classes field type must be " + "subscriptable (list, tuple, ...)" + ) + + def get_resource_classes(self, request): + """ + Return subscriptable type (list, tuple, ...) containing resource classes + :param request: The request object. 
+ :returns: The Resource classes. + """ + if self.resource_classes and self.resource_class: + raise Exception( + "Only one of 'resource_class' and 'resource_classes' can be set" + ) + if hasattr(self, "get_resource_class"): + cls = self.__class__ + warnings.warn( + "The 'get_resource_class()' method has been deprecated. " + "Please implement the new 'get_resource_classes()' method in " + f"{cls.__module__}.{cls.__qualname__}", + DeprecationWarning, + ) + return [self.get_resource_class()] + if self.resource_class: + cls = self.__class__ + warnings.warn( + "The 'resource_class' field has been deprecated. " + "Please implement the new 'resource_classes' field in " + f"{cls.__module__}.{cls.__qualname__}", + DeprecationWarning, + ) + if not self.resource_classes and not self.resource_class: + return [modelresource_factory(self.model)] + if self.resource_classes: + return self.resource_classes + return [self.resource_class] def get_resource_kwargs(self, request, *args, **kwargs): - return {} + """ + Return the kwargs which are to be passed to the Resource constructor. + Can be overridden to provide additional kwarg params. + + :param request: The request object. + :param kwargs: Keyword arguments. + :returns: The Resource kwargs (by default, is the kwargs passed). + """ + return kwargs + + def get_resource_index(self, form): + """ + Return the index of the resource class defined in the form. + + :param form: The form object. + :returns: The index of the resource as an int. + """ + resource_index = 0 + if form and "resource" in form.cleaned_data: + try: + resource_index = int(form.cleaned_data["resource"]) + except ValueError: + pass + return resource_index class BaseImportMixin(BaseImportExportMixin): - def get_import_resource_class(self): + #: If enabled, the import workflow skips the import confirm page + #: and imports the data directly. + #: See :ref:`import_export_skip_admin_confirm`. 
+ skip_import_confirm = False + + def get_import_resource_classes(self, request): """ - Returns ResourceClass to use for import. + :param request: The request object. + Returns ResourceClass subscriptable (list, tuple, ...) to use for import. """ - return self.get_resource_class() + if hasattr(self, "get_import_resource_class"): + cls = self.__class__ + warnings.warn( + "The 'get_import_resource_class()' method has been deprecated. " + "Please implement the new 'get_import_resource_classes()' method in" + f"{cls.__module__}.{cls.__qualname__}", + DeprecationWarning, + ) + return [self.get_import_resource_class()] + resource_classes = self.get_resource_classes(request) + self.check_resource_classes(resource_classes) + return resource_classes def get_import_formats(self): """ Returns available import formats. """ - return [f for f in self.formats if f().can_import()] + return [f for f in self.import_formats if f().can_import()] - def get_import_resource_kwargs(self, request, *args, **kwargs): - return self.get_resource_kwargs(request, *args, **kwargs) + def get_import_resource_kwargs(self, request, **kwargs): + """ + Returns kwargs which will be passed to the Resource constructor. + :param request: The request object. + :param kwargs: Keyword arguments. + :returns: The kwargs (dict) + """ + return self.get_resource_kwargs(request, **kwargs) + + def choose_import_resource_class(self, form, request): + """ + Identify which class should be used for import + :param form: The form object. + :param request: The request object. + :returns: The import Resource class. 
+ """ + resource_index = self.get_resource_index(form) + return self.get_import_resource_classes(request)[resource_index] + + def is_skip_import_confirm_enabled(self): + return ( + getattr(settings, "IMPORT_EXPORT_SKIP_ADMIN_CONFIRM", False) + or self.skip_import_confirm is True + ) class BaseExportMixin(BaseImportExportMixin): model = None + #: If enabled, the export workflow skips the export form and + #: exports the data directly. + #: See :ref:`import_export_skip_admin_export_ui`. + skip_export_form = False + + #: If enabled, the export workflow from Admin UI action menu + #: skips the export form and exports the data directly. + #: See :ref:`import_export_skip_admin_action_export_ui`. + skip_export_form_from_action = False + def get_export_formats(self): """ Returns available export formats. """ - return [f for f in self.formats if f().can_export()] + return [f for f in self.export_formats if f().can_export()] + + def get_export_resource_classes(self, request): + """ + Returns ResourceClass subscriptable (list, tuple, ...) to use for export. + :param request: The request object. + :returns: The Resource classes. + """ + if hasattr(self, "get_export_resource_class"): + cls = self.__class__ + warnings.warn( + "The 'get_export_resource_class()' method has been deprecated. " + "Please implement the new 'get_export_resource_classes()' method " + f"in {cls.__module__}.{cls.__qualname__}", + DeprecationWarning, + ) + return [self.get_export_resource_class()] + resource_classes = self.get_resource_classes(request) + self.check_resource_classes(resource_classes) + return resource_classes + + def choose_export_resource_class(self, form, request): + """ + Identify which class should be used for export + :param request: The request object. + :param form: The form object. + :returns: The export Resource class. 
+ """ + resource_index = self.get_resource_index(form) + return self.get_export_resource_classes(request)[resource_index] - def get_export_resource_class(self): + def get_export_resource_kwargs(self, request, **kwargs): """ - Returns ResourceClass to use for export. + Returns kwargs which will be passed to the Resource constructor. + :param request: The request object. + :param kwargs: Keyword arguments. + :returns: The kwargs (dict) """ - return self.get_resource_class() + return self.get_resource_kwargs(request, **kwargs) - def get_export_resource_kwargs(self, request, *args, **kwargs): - return self.get_resource_kwargs(request, *args, **kwargs) + def get_export_resource_fields_from_form(self, form): + if isinstance(form, SelectableFieldsExportForm): + export_fields = form.get_selected_resource_export_fields() + if export_fields: + return export_fields - def get_data_for_export(self, request, queryset, *args, **kwargs): - resource_class = self.get_export_resource_class() - return resource_class(**self.get_export_resource_kwargs(request, *args, **kwargs))\ - .export(queryset, *args, **kwargs) + return + + def get_data_for_export(self, request, queryset, **kwargs): + export_form = kwargs.get("export_form") + export_class = self.choose_export_resource_class(export_form, request) + export_resource_kwargs = self.get_export_resource_kwargs(request, **kwargs) + export_fields = self.get_export_resource_fields_from_form(export_form) + cls = export_class(**export_resource_kwargs) + export_data = cls.export( + queryset=queryset, export_fields=export_fields, **kwargs + ) + return export_data def get_export_filename(self, file_format): - date_str = now().strftime('%Y-%m-%d') - filename = "%s-%s.%s" % (self.model.__name__, - date_str, - file_format.get_extension()) + date_str = now().strftime("%Y-%m-%d") + filename = "{}-{}.{}".format( + self.model.__name__, + date_str, + file_format.get_extension(), + ) return filename + def is_skip_export_form_enabled(self): + return ( + 
getattr(settings, "IMPORT_EXPORT_SKIP_ADMIN_EXPORT_UI", False) + or self.skip_export_form is True + ) + + def is_skip_export_form_from_action_enabled(self): + return ( + getattr(settings, "IMPORT_EXPORT_SKIP_ADMIN_ACTION_EXPORT_UI", False) + or self.skip_export_form_from_action is True + ) + class ExportViewMixin(BaseExportMixin): - form_class = ExportForm + # Deprecated, and will be removed in a future release (see #1666) + form_class = SelectableFieldsExportForm - def get_export_data(self, file_format, queryset, *args, **kwargs): + def get_export_data(self, file_format, queryset, **kwargs): """ Returns file_format representation for given queryset. """ - data = self.get_data_for_export(self.request, queryset, *args, **kwargs) + data = self.get_data_for_export(self.request, queryset, **kwargs) export_data = file_format.export_data(data) return export_data @@ -86,17 +275,26 @@ def get_context_data(self, **kwargs): def get_form_kwargs(self): kwargs = super().get_form_kwargs() - kwargs['formats'] = self.get_export_formats() + kwargs["formats"] = self.get_export_formats() + kwargs["resources"] = self.get_export_resource_classes(self.request) return kwargs class ExportViewFormMixin(ExportViewMixin, FormView): + # Deprecated, and will be removed in a future release (see #1666) + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + warn( + "ExportViewFormMixin is deprecated and will be removed " + "in a future release.", + DeprecationWarning, + stacklevel=2, + ) + def form_valid(self, form): formats = self.get_export_formats() - file_format = formats[ - int(form.cleaned_data['file_format']) - ]() - if hasattr(self, 'get_filterset'): + file_format = formats[int(form.cleaned_data["format"])]() + if hasattr(self, "get_filterset"): queryset = self.get_filterset(self.get_filterset_class()).qs else: queryset = self.get_queryset() @@ -107,7 +305,7 @@ def form_valid(self, form): response = HttpResponse(export_data, content_type=content_type) except 
TypeError: response = HttpResponse(export_data, mimetype=content_type) - response['Content-Disposition'] = 'attachment; filename="%s"' % ( + response["Content-Disposition"] = 'attachment; filename="{}"'.format( self.get_export_filename(file_format), ) diff --git a/import_export/options.py b/import_export/options.py new file mode 100644 index 000000000..1a2d42fc3 --- /dev/null +++ b/import_export/options.py @@ -0,0 +1,198 @@ +class ResourceOptions: + """ + The inner Meta class allows for class-level configuration of how the + Resource should behave. The following options are available: + """ + + model = None + """ + Django Model class or full application label string. It is used to introspect + available fields. + + """ + fields = None + """ + Controls what introspected fields the Resource should include. A whitelist + of fields. + """ + + exclude = None + """ + Controls what introspected fields the Resource should + NOT include. A blacklist of fields. + """ + + instance_loader_class = None + """ + Controls which class instance will take + care of loading existing objects. + """ + + import_id_fields = ["id"] + """ + Controls which object fields will be used to + identify existing instances. + """ + + import_order = None + """ + Controls import order for columns. + """ + + export_order = None + """ + Controls export order for columns. + """ + + widgets = None + """ + This dictionary defines widget kwargs for fields. + """ + + use_transactions = None + """ + Controls if import should use database transactions. Default value is + ``None`` meaning ``settings.IMPORT_EXPORT_USE_TRANSACTIONS`` will be + evaluated. + """ + + skip_unchanged = False + """ + Controls if the import should skip unchanged records. + If ``True``, then each existing instance is compared with the instance to be + imported, and if there are no changes detected, the row is recorded as skipped, + and no database update takes place. + + The advantages of enabling this option are: + + #. 
Avoids unnecessary database operations which can result in performance + improvements for large datasets. + + #. Skipped records are recorded in each :class:`~import_export.results.RowResult`. + + #. Skipped records are clearly visible in the + :ref:`import confirmation page`. + + For the default ``skip_unchanged`` logic to work, the + :attr:`~import_export.resources.ResourceOptions.skip_diff` must also be ``False`` + (which is the default): + + Default value is ``False``. + """ + + report_skipped = True + """ + Controls if the result reports skipped rows. Default value is ``True``. + """ + + clean_model_instances = False + """ + Controls whether + `full_clean `_ + is called during the import + process to identify potential validation errors for each (non skipped) row. + The default value is ``False``. + """ # noqa: E501 + + chunk_size = None + """ + Controls the chunk_size argument of Queryset.iterator or, + if prefetch_related is used, the per_page attribute of Paginator. + """ + + skip_diff = False + """ + Controls whether or not an instance should be diffed following import. + + By default, an instance is copied prior to insert, update or delete. + After each row is processed, the instance's copy is diffed against the original, + and the value stored in each :class:`~import_export.results.RowResult`. + If diffing is not required, then disabling the diff operation by setting this value + to ``True`` improves performance, because the copy and comparison operations are + skipped for each row. + + If enabled, then :meth:`~import_export.resources.Resource.skip_row` checks do not + execute, because 'skip' logic requires comparison between the stored and imported + versions of a row. + + If enabled, then HTML row reports are also not generated, meaning that the + :attr:`~import_export.resources.ResourceOptions.skip_html_diff` value is ignored. + + The default value is ``False``. 
+ """ + + skip_html_diff = False + """ + Controls whether or not a HTML report is generated after each row. + By default, the difference between a stored copy and an imported instance + is generated in HTML form and stored in each + :class:`~import_export.results.RowResult`. + + The HTML report is used to present changes in the + :ref:`import confirmation page` in the admin site, hence when this + value is ``True``, then changes will not be presented on the confirmation screen. + + If the HTML report is not required, then setting this value to ``True`` improves + performance, because the HTML generation is skipped for each row. + This is a useful optimization when importing large datasets. + + The default value is ``False``. + """ + + use_bulk = False + """ + Controls whether import operations should be performed in bulk. + By default, an object's save() method is called for each row in a data set. + When bulk is enabled, objects are saved using bulk operations. + """ + + batch_size = 1000 + """ + The batch_size parameter controls how many objects are created in a single query. + The default is to create objects in batches of 1000. + See `bulk_create() + `_. + This parameter is only used if ``use_bulk`` is ``True``. + """ + + force_init_instance = False + """ + If ``True``, this parameter will prevent imports from checking the database for + existing instances. + Enabling this parameter is a performance enhancement if your import dataset is + guaranteed to contain new instances. + """ + + using_db = None + """ + DB Connection name to use for db transactions. If not provided, + ``router.db_for_write(model)`` will be evaluated and if it's missing, + ``DEFAULT_DB_ALIAS`` constant ("default") is used. + """ + + store_row_values = False + """ + If True, each row's raw data will be stored in each + :class:`~import_export.results.RowResult`. + Enabling this parameter will increase the memory usage during import + which should be considered when importing large datasets. 
+ """ + + store_instance = False + """ + If True, the row instance will be stored in each + :class:`~import_export.results.RowResult`. + Enabling this parameter will increase the memory usage during import + which should be considered when importing large datasets. + + This value will always be set to ``True`` when importing via the Admin UI. + This is so that appropriate ``LogEntry`` instances can be created. + """ + + use_natural_foreign_keys = False + """ + If ``True``, this value will be passed to all foreign + key widget fields whose models support natural foreign keys. That is, + the model has a natural_key function and the manager has a + ``get_by_natural_key()`` function. + """ diff --git a/import_export/resources.py b/import_export/resources.py index e00d3ba0c..2295881a8 100644 --- a/import_export/resources.py +++ b/import_export/resources.py @@ -1,10 +1,10 @@ import functools import logging -import traceback from collections import OrderedDict from copy import deepcopy +from html import escape +from warnings import warn -import django import tablib from diff_match_patch import diff_match_patch from django.conf import settings @@ -12,224 +12,42 @@ from django.core.management.color import no_style from django.core.paginator import Paginator from django.db import connections, router -from django.db.models.fields.related import ForeignObjectRel +from django.db.models import fields +from django.db.models.fields.related import ForeignKey from django.db.models.query import QuerySet -from django.db.transaction import ( - TransactionManagementError, - savepoint, - savepoint_commit, - savepoint_rollback, -) +from django.db.transaction import TransactionManagementError, set_rollback from django.utils.encoding import force_str from django.utils.safestring import mark_safe +from django.utils.translation import gettext_lazy as _ -from . import widgets +from . 
import exceptions, widgets +from .declarative import DeclarativeMetaclass, ModelDeclarativeMetaclass from .fields import Field -from .instance_loaders import ModelInstanceLoader from .results import Error, Result, RowResult -from .utils import atomic_if_using_transaction - -if django.VERSION[0] >= 3: - from django.core.exceptions import FieldDoesNotExist -else: - from django.db.models.fields import FieldDoesNotExist - +from .utils import atomic_if_using_transaction, get_related_model logger = logging.getLogger(__name__) # Set default logging handler to avoid "No handler found" warnings. logger.addHandler(logging.NullHandler()) -def get_related_model(field): - if hasattr(field, 'related_model'): - return field.related_model - # Django 1.6, 1.7 - if field.rel: - return field.rel.to - - -class ResourceOptions: - """ - The inner Meta class allows for class-level configuration of how the - Resource should behave. The following options are available: - """ - - model = None - """ - Django Model class. It is used to introspect available - fields. - - """ - fields = None - """ - Controls what introspected fields the Resource should include. A whitelist - of fields. - """ - - exclude = None - """ - Controls what introspected fields the Resource should - NOT include. A blacklist of fields. - """ - - instance_loader_class = None - """ - Controls which class instance will take - care of loading existing objects. - """ - - import_id_fields = ['id'] - """ - Controls which object fields will be used to - identify existing instances. - """ - - export_order = None - """ - Controls export order for columns. - """ - - widgets = None - """ - This dictionary defines widget kwargs for fields. - """ - - use_transactions = None - """ - Controls if import should use database transactions. Default value is - ``None`` meaning ``settings.IMPORT_EXPORT_USE_TRANSACTIONS`` will be - evaluated. - """ - - skip_unchanged = False - """ - Controls if the import should skip unchanged records. 
Default value is - False - """ - - report_skipped = True - """ - Controls if the result reports skipped rows. Default value is True - """ - - clean_model_instances = False - """ - Controls whether ``instance.full_clean()`` is called during the import - process to identify potential validation errors for each (non skipped) row. - The default value is False. - """ - - chunk_size = None - """ - Controls the chunk_size argument of Queryset.iterator or, - if prefetch_related is used, the per_page attribute of Paginator. - """ - - skip_diff = False - """ - Controls whether or not an instance should be diffed following import. - By default, an instance is copied prior to insert, update or delete. - After each row is processed, the instance's copy is diffed against the original, and the value - stored in each ``RowResult``. - If diffing is not required, then disabling the diff operation by setting this value to ``True`` - improves performance, because the copy and comparison operations are skipped for each row. - If enabled, then ``skip_row()`` checks do not execute, because 'skip' logic requires - comparison between the stored and imported versions of a row. - If enabled, then HTML row reports are also not generated (see ``skip_html_diff``). - The default value is False. - """ - - skip_html_diff = False - """ - Controls whether or not a HTML report is generated after each row. - By default, the difference between a stored copy and an imported instance - is generated in HTML form and stored in each ``RowResult``. - The HTML report is used to present changes on the confirmation screen in the admin site, - hence when this value is ``True``, then changes will not be presented on the confirmation - screen. - If the HTML report is not required, then setting this value to ``True`` improves performance, - because the HTML generation is skipped for each row. - This is a useful optimization when importing large datasets. - The default value is False. 
- """ - - use_bulk = False - """ - Controls whether import operations should be performed in bulk. - By default, an object's save() method is called for each row in a data set. - When bulk is enabled, objects are saved using bulk operations. - """ - - batch_size = 1000 +def has_natural_foreign_key(model): """ - The batch_size parameter controls how many objects are created in a single query. - The default is to create objects in batches of 1000. - See `bulk_create() `_. - This parameter is only used if ``use_bulk`` is True. + Determine if a model has natural foreign key functions """ - - force_init_instance = False - """ - If True, this parameter will prevent imports from checking the database for existing instances. - Enabling this parameter is a performance enhancement if your import dataset is guaranteed to - contain new instances. - """ - - using_db = None - """ - DB Connection name to use for db transactions. If not provided, - ``router.db_for_write(model)`` will be evaluated and if it's missing, - DEFAULT_DB_ALIAS constant ("default") is used. - """ - - -class DeclarativeMetaclass(type): - - def __new__(cls, name, bases, attrs): - declared_fields = [] - meta = ResourceOptions() - - # If this class is subclassing another Resource, add that Resource's - # fields. Note that we loop over the bases in *reverse*. This is - # necessary in order to preserve the correct order of fields. 
- for base in bases[::-1]: - if hasattr(base, 'fields'): - declared_fields = list(base.fields.items()) + declared_fields - # Collect the Meta options - options = getattr(base, 'Meta', None) - for option in [option for option in dir(options) - if not option.startswith('_') and hasattr(options, option)]: - setattr(meta, option, getattr(options, option)) - - # Add direct fields - for field_name, obj in attrs.copy().items(): - if isinstance(obj, Field): - field = attrs.pop(field_name) - if not field.column_name: - field.column_name = field_name - declared_fields.append((field_name, field)) - - attrs['fields'] = OrderedDict(declared_fields) - new_class = super().__new__(cls, name, bases, attrs) - - # Add direct options - options = getattr(new_class, 'Meta', None) - for option in [option for option in dir(options) - if not option.startswith('_') and hasattr(options, option)]: - setattr(meta, option, getattr(options, option)) - new_class._meta = meta - - return new_class + return hasattr(model, "natural_key") and hasattr( + model.objects, "get_by_natural_key" + ) class Diff: def __init__(self, resource, instance, new): - self.left = self._export_resource_fields(resource, instance) + self.left = Diff._read_field_values(resource, instance) self.right = [] self.new = new - def compare_with(self, resource, instance, dry_run=False): - self.right = self._export_resource_fields(resource, instance) + def compare_with(self, resource, instance): + self.right = Diff._read_field_values(resource, instance) def as_html(self): data = [] @@ -244,8 +62,9 @@ def as_html(self): data.append(html) return data - def _export_resource_fields(self, resource, instance): - return [resource.export_field(f, instance) if instance else "" for f in resource.get_user_visible_fields()] + @classmethod + def _read_field_values(cls, resource, instance): + return [f.export(instance) for f in resource.get_import_fields()] class Resource(metaclass=DeclarativeMetaclass): @@ -254,7 +73,12 @@ class 
Resource(metaclass=DeclarativeMetaclass): representations and handle importing and exporting data. """ - def __init__(self): + def __init__(self, **kwargs): + """ + kwargs: + An optional dict of kwargs. + Subclasses can use kwargs to pass dynamic values to enhance import / exports. + """ # The fields class attribute is the *class-wide* definition of # fields. Because a particular *instance* of the class might want to # alter self.fields, we create self.fields here by copying cls.fields. @@ -263,9 +87,9 @@ def __init__(self): self.fields = deepcopy(self.fields) # lists to hold model instances in memory when bulk operations are enabled - self.create_instances = list() - self.update_instances = list() - self.delete_instances = list() + self.create_instances = [] + self.update_instances = [] + self.delete_instances = [] @classmethod def get_result_class(self): @@ -295,6 +119,7 @@ def get_diff_class(self): """ return Diff + @classmethod def get_db_connection_name(self): if self._meta.using_db is None: return router.db_for_write(self._meta.model) @@ -303,22 +128,24 @@ def get_db_connection_name(self): def get_use_transactions(self): if self._meta.use_transactions is None: - return getattr(settings, 'IMPORT_EXPORT_USE_TRANSACTIONS', True) + return getattr(settings, "IMPORT_EXPORT_USE_TRANSACTIONS", True) else: return self._meta.use_transactions def get_chunk_size(self): if self._meta.chunk_size is None: - return getattr(settings, 'IMPORT_EXPORT_CHUNK_SIZE', 100) + return getattr(settings, "IMPORT_EXPORT_CHUNK_SIZE", 100) else: return self._meta.chunk_size def get_fields(self, **kwargs): - """ - Returns fields sorted according to - :attr:`~import_export.resources.ResourceOptions.export_order`. 
- """ - return [self.fields[f] for f in self.get_export_order()] + warn( + "The 'get_fields()' method is deprecated and will be removed " + "in a future release", + DeprecationWarning, + stacklevel=2, + ) + return list(self.fields.values()) def get_field_name(self, field): """ @@ -327,8 +154,9 @@ def get_field_name(self, field): for field_name, f in self.fields.items(): if f == field: return field_name - raise AttributeError("Field %s does not exists in %s resource" % ( - field, self.__class__)) + raise AttributeError( + f"Field {field} does not exists in {self.__class__} resource" + ) def init_instance(self, row=None): """ @@ -339,15 +167,14 @@ def init_instance(self, row=None): def get_instance(self, instance_loader, row): """ - If all 'import_id_fields' are present in the dataset, calls - the :doc:`InstanceLoader `. Otherwise, - returns `None`. + Calls the :doc:`InstanceLoader `. """ - import_id_fields = [ - self.fields[f] for f in self.get_import_id_fields() - ] + import_id_fields = [self.fields[f] for f in self.get_import_id_fields()] for field in import_id_fields: if field.column_name not in row: + # if there is an 'import id field' which is not defined in the + # row, then it is not possible to return an existing instance, + # so no need to proceed any further return return instance_loader.get_instance(row) @@ -358,22 +185,24 @@ def get_or_init_instance(self, instance_loader, row): if not self._meta.force_init_instance: instance = self.get_instance(instance_loader, row) if instance: - return (instance, False) - return (self.init_instance(row), True) + return instance, False + return self.init_instance(row), True def get_import_id_fields(self): - """ - """ + """ """ return self._meta.import_id_fields def get_bulk_update_fields(self): """ Returns the fields to be included in calls to bulk_update(). - ``import_id_fields`` are removed because `id` fields cannot be supplied to bulk_update(). 
+ ``import_id_fields`` are removed because `id` fields cannot be supplied to + bulk_update(). """ return [f for f in self.fields if f not in self._meta.import_id_fields] - def bulk_create(self, using_transactions, dry_run, raise_errors, batch_size=None): + def bulk_create( + self, using_transactions, dry_run, raise_errors, batch_size=None, result=None + ): """ Creates objects by calling ``bulk_create``. """ @@ -382,15 +211,17 @@ def bulk_create(self, using_transactions, dry_run, raise_errors, batch_size=None if not using_transactions and dry_run: pass else: - self._meta.model.objects.bulk_create(self.create_instances, batch_size=batch_size) + self._meta.model.objects.bulk_create( + self.create_instances, batch_size=batch_size + ) except Exception as e: - logger.exception(e) - if raise_errors: - raise e + self.handle_import_error(result, e, raise_errors) finally: self.create_instances.clear() - def bulk_update(self, using_transactions, dry_run, raise_errors, batch_size=None): + def bulk_update( + self, using_transactions, dry_run, raise_errors, batch_size=None, result=None + ): """ Updates objects by calling ``bulk_update``. """ @@ -399,16 +230,17 @@ def bulk_update(self, using_transactions, dry_run, raise_errors, batch_size=None if not using_transactions and dry_run: pass else: - self._meta.model.objects.bulk_update(self.update_instances, self.get_bulk_update_fields(), - batch_size=batch_size) + self._meta.model.objects.bulk_update( + self.update_instances, + self.get_bulk_update_fields(), + batch_size=batch_size, + ) except Exception as e: - logger.exception(e) - if raise_errors: - raise e + self.handle_import_error(result, e, raise_errors) finally: self.update_instances.clear() - def bulk_delete(self, using_transactions, dry_run, raise_errors): + def bulk_delete(self, using_transactions, dry_run, raise_errors, result=None): """ Deletes objects by filtering on a list of instances to be deleted, then calling ``delete()`` on the entire queryset. 
@@ -421,23 +253,23 @@ def bulk_delete(self, using_transactions, dry_run, raise_errors): delete_ids = [o.pk for o in self.delete_instances] self._meta.model.objects.filter(pk__in=delete_ids).delete() except Exception as e: - logger.exception(e) - if raise_errors: - raise e + self.handle_import_error(result, e, raise_errors) finally: self.delete_instances.clear() - def validate_instance(self, instance, import_validation_errors=None, validate_unique=True): + def validate_instance( + self, instance, import_validation_errors=None, validate_unique=True + ): """ Takes any validation errors that were raised by - :meth:`~import_export.resources.Resource.import_obj`, and combines them + :meth:`~import_export.resources.Resource.import_instance`, and combines them with validation errors raised by the instance's ``full_clean()`` method. The combined errors are then re-raised as single, multi-field ValidationError. If the ``clean_model_instances`` option is False, the instances's ``full_clean()`` method is not called, and only the errors raised by - ``import_obj()`` are re-raised. + ``import_instance()`` are re-raised. """ if import_validation_errors is None: errors = {} @@ -455,112 +287,228 @@ def validate_instance(self, instance, import_validation_errors=None, validate_un if errors: raise ValidationError(errors) - def save_instance(self, instance, using_transactions=True, dry_run=False): - """ + def save_instance(self, instance, is_create, row, **kwargs): + r""" Takes care of saving the object to the database. Objects can be created in bulk if ``use_bulk`` is enabled. + + :param instance: The instance of the object to be persisted. + + :param is_create: A boolean flag to indicate whether this is a new object + to be created, or an existing object to be updated. + + :param row: A dict representing the import row. 
+ + :param \**kwargs: + See :meth:`import_row """ - self.before_save_instance(instance, using_transactions, dry_run) + self.before_save_instance(instance, row, **kwargs) if self._meta.use_bulk: - if instance.pk: - self.update_instances.append(instance) - else: + if is_create: self.create_instances.append(instance) + else: + self.update_instances.append(instance) else: - if not using_transactions and dry_run: + if not self._is_using_transactions(kwargs) and self._is_dry_run(kwargs): # we don't have transactions and we want to do a dry_run pass else: - instance.save() - self.after_save_instance(instance, using_transactions, dry_run) + self.do_instance_save(instance, is_create) + self.after_save_instance(instance, row, **kwargs) - def before_save_instance(self, instance, using_transactions, dry_run): + def do_instance_save(self, instance, is_create): """ + A method specifically to provide a single overridable hook for the instance + save operation. + For example, this can be overridden to implement update_or_create(). + + :param instance: The model instance to be saved. + :param is_create: A boolean flag to indicate whether this is a new object + to be created, or an existing object to be updated. + """ + instance.save() + + def before_save_instance(self, instance, row, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param instance: A new or existing model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param \**kwargs: + See :meth:`import_row` """ pass - def after_save_instance(self, instance, using_transactions, dry_run): - """ + def after_save_instance(self, instance, row, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param instance: A new or existing model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. 
+ + :param \**kwargs: + See :meth:`import_row` """ pass - def delete_instance(self, instance, using_transactions=True, dry_run=False): - """ + def delete_instance(self, instance, row, **kwargs): + r""" Calls :meth:`instance.delete` as long as ``dry_run`` is not set. If ``use_bulk`` then instances are appended to a list for bulk import. + + :param instance: A new or existing model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param \**kwargs: + See :meth:`import_row` """ - self.before_delete_instance(instance, dry_run) + self.before_delete_instance(instance, row, **kwargs) if self._meta.use_bulk: self.delete_instances.append(instance) else: - if not using_transactions and dry_run: + if not self._is_using_transactions(kwargs) and self._is_dry_run(kwargs): # we don't have transactions and we want to do a dry_run pass else: instance.delete() - self.after_delete_instance(instance, dry_run) + self.after_delete_instance(instance, row, **kwargs) - def before_delete_instance(self, instance, dry_run): - """ + def before_delete_instance(self, instance, row, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param instance: A new or existing model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param \**kwargs: + See :meth:`import_row` """ pass - def after_delete_instance(self, instance, dry_run): - """ + def after_delete_instance(self, instance, row, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param instance: A new or existing model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param \**kwargs: + See :meth:`import_row` """ pass - def import_field(self, field, obj, data, is_m2m=False, **kwargs): - """ - Calls :meth:`import_export.fields.Field.save` if ``Field.attribute`` - is specified, and ``Field.column_name`` is found in ``data``. 
+ def import_field(self, field, instance, row, is_m2m=False, **kwargs): + r""" + Handles persistence of the field data. + + :param field: A :class:`import_export.fields.Field` instance. + + :param instance: A new or existing model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param is_m2m: A boolean value indicating whether or not this is a + many-to-many field. + + :param \**kwargs: + See :meth:`import_row` """ - if field.attribute and field.column_name in data: - field.save(obj, data, is_m2m, **kwargs) + if not field.attribute: + logger.debug(f"skipping field '{field}' - field attribute is not defined") + return + if field.column_name not in row: + logger.debug( + f"skipping field '{field}' " + f"- column name '{field.column_name}' is not present in row" + ) + return + field.save(instance, row, is_m2m, **kwargs) def get_import_fields(self): - return self.get_fields() + import_fields = [] + for field_name in self.get_import_order(): + if field_name in self.fields: + import_fields.append(self.fields[field_name]) + continue + # issue 1815 + # allow for fields to be referenced by column_name in `fields` list + for field in self.fields.values(): + if field.column_name == field_name: + import_fields.append(field) + continue + return import_fields def import_obj(self, obj, data, dry_run, **kwargs): - """ + warn( + "The 'import_obj' method is deprecated and will be replaced " + "with 'import_instance(self, instance, row, **kwargs)' " + "in a future release. Refer to Release Notes for details.", + DeprecationWarning, + stacklevel=2, + ) + if dry_run is True: + kwargs.update({"dry_run": dry_run}) + self.import_instance(obj, data, **kwargs) + + def import_instance(self, instance, row, **kwargs): + r""" Traverses every field in this Resource and calls :meth:`~import_export.resources.Resource.import_field`. 
If ``import_field()`` results in a ``ValueError`` being raised for one of more fields, those errors are captured and reraised as a single, - multi-field ValidationError.""" + multi-field ValidationError. + + :param instance: A new or existing model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param \**kwargs: + See :meth:`import_row` + """ errors = {} for field in self.get_import_fields(): if isinstance(field.widget, widgets.ManyToManyWidget): continue try: - self.import_field(field, obj, data, **kwargs) + self.import_field(field, instance, row, **kwargs) except ValueError as e: - errors[field.attribute] = ValidationError( - force_str(e), code="invalid") + errors[field.attribute] = ValidationError(force_str(e), code="invalid") if errors: raise ValidationError(errors) - def save_m2m(self, obj, data, using_transactions, dry_run): - """ + def save_m2m(self, instance, row, **kwargs): + r""" Saves m2m fields. Model instance need to have a primary key value before a many-to-many relationship can be used. + + :param instance: A new or existing model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param \**kwargs: + See :meth:`import_row` """ + using_transactions = self._is_using_transactions(kwargs) + dry_run = self._is_dry_run(kwargs) if (not using_transactions and dry_run) or self._meta.use_bulk: # we don't have transactions and we want to do a dry_run - # OR use_bulk is enabled (m2m operations are not supported for bulk operations) + # OR use_bulk is enabled (m2m operations are not supported + # for bulk operations) pass else: for field in self.get_import_fields(): if not isinstance(field.widget, widgets.ManyToManyWidget): continue - self.import_field(field, obj, data, True) + self.import_field(field, instance, row, True) def for_delete(self, row, instance): """ @@ -568,10 +516,14 @@ def for_delete(self, row, instance): Default implementation returns ``False``. 
Override this method to handle deletion. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param instance: A new or existing model instance. """ return False - def skip_row(self, instance, original): + def skip_row(self, instance, original, row, import_validation_errors=None): """ Returns ``True`` if ``row`` importing should be skipped. @@ -581,29 +533,62 @@ def skip_row(self, instance, original): If skip_diff is True, then no comparisons can be made because ``original`` will be None. - When left unspecified, skip_diff and skip_unchanged both default to ``False``, - and rows are never skipped. + When left unspecified, skip_diff and skip_unchanged both default to ``False``, + and rows are never skipped. + + By default, rows are not skipped if validation errors have been detected + during import. You can change this behavior and choose to ignore validation + errors by overriding this method. Override this method to handle skipping rows meeting certain conditions. Use ``super`` if you want to preserve default handling while overriding :: + class YourResource(ModelResource): - def skip_row(self, instance, original): + def skip_row(self, instance, original, + row, import_validation_errors=None): # Add code here - return super(YourResource, self).skip_row(instance, original) + return super().skip_row(instance, original, row, + import_validation_errors=import_validation_errors) + + :param instance: A new or updated model instance. + + :param original: The original persisted model instance. + + :param row: A ``dict`` containing key / value data for the row to be imported. + :param import_validation_errors: A ``dict`` containing key / value data for any + identified validation errors. 
""" - if not self._meta.skip_unchanged or self._meta.skip_diff: + if ( + not self._meta.skip_unchanged + or self._meta.skip_diff + or import_validation_errors + ): return False for field in self.get_import_fields(): - try: - # For fields that are models.fields.related.ManyRelatedManager - # we need to compare the results - if list(field.get_value(instance).all()) != list(field.get_value(original).all()): + # For fields that are models.fields.related.ManyRelatedManager + # we need to compare the results + if isinstance(field.widget, widgets.ManyToManyWidget): + # #1437 - handle m2m field not present in import file + if field.column_name not in row.keys(): + continue + # m2m instance values are taken from the 'row' because they + # have not been written to the 'instance' at this point + instance_values = list(field.clean(row)) + original_values = ( + [] if original.pk is None else list(field.get_value(original).all()) + ) + if len(instance_values) != len(original_values): + return False + + if sorted(v.pk for v in instance_values) != sorted( + v.pk for v in original_values + ): return False - except AttributeError: + else: if field.get_value(instance) != field.get_value(original): return False return True @@ -612,65 +597,134 @@ def get_diff_headers(self): """ Diff representation headers. """ - return self.get_user_visible_headers() + return [force_str(field.column_name) for field in self.get_import_fields()] - def before_import(self, dataset, using_transactions, dry_run, **kwargs): - """ + def before_import(self, dataset, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param dataset: A ``tablib.Dataset``. + + :param \**kwargs: + See :meth:`import_row` """ pass - def after_import(self, dataset, result, using_transactions, dry_run, **kwargs): - """ + def after_import(self, dataset, result, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param dataset: A ``tablib.Dataset``. 
+ + :param result: A :class:`import_export.results.Result` implementation + containing a summary of the import. + + :param \**kwargs: + See :meth:`import_row` """ pass - def before_import_row(self, row, row_number=None, **kwargs): - """ + def before_import_row(self, row, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param \**kwargs: + See :meth:`import_row` """ pass - def after_import_row(self, row, row_result, row_number=None, **kwargs): - """ + def after_import_row(self, row, row_result, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param row: A ``dict`` containing key / value data for the row to be imported. + + :param row_result: A ``RowResult`` instance. + References the persisted ``instance`` as an attribute. + + :param \**kwargs: + See :meth:`import_row` """ pass def after_import_instance(self, instance, new, row_number=None, **kwargs): - """ + warn( + "The 'after_import_instance' method is deprecated and will be replaced " + "with 'after_init_instance(self, instance, new, row, **kwargs)' " + "in a future release. Refer to Release Notes for details.", + DeprecationWarning, + stacklevel=2, + ) + if row_number is not None: + kwargs.update({"row_number": row_number}) + self.after_init_instance(instance, new, None, **kwargs) + + def after_init_instance(self, instance, new, row, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param instance: A new or existing model instance. + + :param new: a boolean flag indicating whether instance is new or existing. + + :param row: A ``dict`` containing key / value data for the row to be imported. 
+ + :param \**kwargs: + See :meth:`import_row` """ pass - def import_row(self, row, instance_loader, using_transactions=True, dry_run=False, raise_errors=False, **kwargs): - """ + def handle_import_error(self, result, error, raise_errors=False): + logger.debug(error, exc_info=error) + if result: + result.append_base_error(self.get_error_result_class()(error)) + if raise_errors: + raise exceptions.ImportError(error) + + def import_row(self, row, instance_loader, **kwargs): + r""" Imports data from ``tablib.Dataset``. Refer to :doc:`import_workflow` for a more complete description of the whole import process. - :param row: A ``dict`` of the row to import + :param row: A ``dict`` of the 'row' to import. + A row is a dict of data fields so can be a csv line, a JSON object, + a YAML object etc. - :param instance_loader: The instance loader to be used to load the row + :param instance_loader: The instance loader to be used to load the model + instance associated with the row (if there is one). - :param using_transactions: If ``using_transactions`` is set, a transaction - is being used to wrap the import + :param \**kwargs: + See below. - :param dry_run: If ``dry_run`` is set, or error occurs, transaction - will be rolled back. + :Keyword Arguments: + * dry_run (``boolean``) -- + A True value means that no data should be persisted. + * use_transactions (``boolean``) -- + A True value means that transactions will be rolled back. + * row_number (``int``) -- + The index of the row being imported. 
""" skip_diff = self._meta.skip_diff + + if not self._meta.store_instance: + self._meta.store_instance = kwargs.get( + "retain_instance_in_row_result", False + ) + row_result = self.get_row_result_class()() + if self._meta.store_row_values: + row_result.row_values = row original = None try: self.before_import_row(row, **kwargs) instance, new = self.get_or_init_instance(instance_loader, row) - self.after_import_instance(instance, new, **kwargs) + self.after_init_instance(instance, new, row, **kwargs) if new: row_result.import_type = RowResult.IMPORT_TYPE_NEW else: row_result.import_type = RowResult.IMPORT_TYPE_UPDATE - row_result.new_record = new if not skip_diff: original = deepcopy(instance) diff = self.get_diff_class()(self, original, new) @@ -678,31 +732,41 @@ def import_row(self, row, instance_loader, using_transactions=True, dry_run=Fals if new: row_result.import_type = RowResult.IMPORT_TYPE_SKIP if not skip_diff: - diff.compare_with(self, None, dry_run) + diff.compare_with(self, None) else: row_result.import_type = RowResult.IMPORT_TYPE_DELETE row_result.add_instance_info(instance) - self.delete_instance(instance, using_transactions, dry_run) + if self._meta.store_instance: + # create a copy before deletion so id fields are retained + row_result.instance = deepcopy(instance) + self.delete_instance(instance, row, **kwargs) if not skip_diff: - diff.compare_with(self, None, dry_run) + diff.compare_with(self, None) else: import_validation_errors = {} try: - self.import_obj(instance, row, dry_run, **kwargs) + self.import_instance(instance, row, **kwargs) except ValidationError as e: - # Validation errors from import_obj() are passed on to - # validate_instance(), where they can be combined with model - # instance validation errors if necessary - import_validation_errors = e.update_error_dict(import_validation_errors) - if self.skip_row(instance, original): + # Validation errors are passed on to validate_instance(), + # where they can be combined with model 
instance validation + # errors if necessary + import_validation_errors = e.update_error_dict( + import_validation_errors + ) + + if self.skip_row(instance, original, row, import_validation_errors): row_result.import_type = RowResult.IMPORT_TYPE_SKIP else: self.validate_instance(instance, import_validation_errors) - self.save_instance(instance, using_transactions, dry_run) - self.save_m2m(instance, row, using_transactions, dry_run) - row_result.add_instance_info(instance) + self.save_instance(instance, new, row, **kwargs) + self.save_m2m(instance, row, **kwargs) + row_result.add_instance_info(instance) + if self._meta.store_instance: + row_result.instance = instance if not skip_diff: - diff.compare_with(self, instance, dry_run) + diff.compare_with(self, instance) + if not new: + row_result.original = original if not skip_diff and not self._meta.skip_html_diff: row_result.diff = diff.as_html() @@ -716,46 +780,51 @@ def import_row(self, row, instance_loader, using_transactions=True, dry_run=Fals # There is no point logging a transaction error for each row # when only the original error is likely to be relevant if not isinstance(e, TransactionManagementError): - logger.debug(e, exc_info=e) - tb_info = traceback.format_exc() - row_result.errors.append(self.get_error_result_class()(e, tb_info, row)) - - if self._meta.use_bulk: - # persist a batch of rows - # because this is a batch, any exceptions are logged and not associated - # with a specific row - if len(self.create_instances) == self._meta.batch_size: - self.bulk_create(using_transactions, dry_run, raise_errors, batch_size=self._meta.batch_size) - if len(self.update_instances) == self._meta.batch_size: - self.bulk_update(using_transactions, dry_run, raise_errors, batch_size=self._meta.batch_size) - if len(self.delete_instances) == self._meta.batch_size: - self.bulk_delete(using_transactions, dry_run, raise_errors) + logger.debug(e, exc_info=True) + row_result.errors.append( + self.get_error_result_class()(e, 
row=row, number=kwargs["row_number"]) + ) return row_result - def import_data(self, dataset, dry_run=False, raise_errors=False, - use_transactions=None, collect_failed_rows=False, - rollback_on_validation_errors=False, **kwargs): - """ + def import_data( + self, + dataset, + dry_run=False, + raise_errors=False, + use_transactions=None, + collect_failed_rows=False, + rollback_on_validation_errors=False, + **kwargs, + ): + r""" Imports data from ``tablib.Dataset``. Refer to :doc:`import_workflow` for a more complete description of the whole import process. - :param dataset: A ``tablib.Dataset`` + :param dataset: A ``tablib.Dataset``. :param raise_errors: Whether errors should be printed to the end user - or raised regularly. + or raised regularly. :param use_transactions: If ``True`` the import process will be processed - inside a transaction. + inside a transaction. - :param collect_failed_rows: If ``True`` the import process will collect - failed rows. + :param collect_failed_rows: + If ``True`` the import process will create a new dataset object comprising + failed rows and errors. + This can be useful for debugging purposes but will cause higher memory usage + for larger datasets. + See :attr:`~import_export.results.Result.failed_dataset`. - :param rollback_on_validation_errors: If both ``use_transactions`` and ``rollback_on_validation_errors`` - are set to ``True``, the import process will be rolled back in case of ValidationError. + :param rollback_on_validation_errors: If both ``use_transactions`` and + ``rollback_on_validation_errors`` are set to ``True``, the import process will + be rolled back in case of ValidationError. :param dry_run: If ``dry_run`` is set, or an error occurs, if a transaction is being used, it will be rolled back. + + :param \**kwargs: + Metadata which may be associated with the import. 
""" if use_transactions is None: @@ -763,43 +832,57 @@ def import_data(self, dataset, dry_run=False, raise_errors=False, db_connection = self.get_db_connection_name() connection = connections[db_connection] - supports_transactions = getattr(connection.features, "supports_transactions", False) + supports_transactions = getattr( + connection.features, "supports_transactions", False + ) if use_transactions and not supports_transactions: raise ImproperlyConfigured using_transactions = (use_transactions or dry_run) and supports_transactions - if self._meta.batch_size is not None and (not isinstance(self._meta.batch_size, int) or self._meta.batch_size < 0): + if self._meta.batch_size is not None and ( + not isinstance(self._meta.batch_size, int) or self._meta.batch_size < 0 + ): raise ValueError("Batch size must be a positive integer") with atomic_if_using_transaction(using_transactions, using=db_connection): - return self.import_data_inner( - dataset, dry_run, raise_errors, using_transactions, collect_failed_rows, - rollback_on_validation_errors, **kwargs) + result = self.import_data_inner( + dataset, + dry_run, + raise_errors, + using_transactions, + collect_failed_rows, + **kwargs, + ) + if using_transactions and ( + dry_run + or result.has_errors() + or (rollback_on_validation_errors and result.has_validation_errors()) + ): + set_rollback(True, using=db_connection) + return result def import_data_inner( - self, dataset, dry_run, raise_errors, using_transactions, - collect_failed_rows, rollback_on_validation_errors=False, **kwargs): + self, + dataset, + dry_run, + raise_errors, + using_transactions, + collect_failed_rows, + **kwargs, + ): result = self.get_result_class()() result.diff_headers = self.get_diff_headers() result.total_rows = len(dataset) db_connection = self.get_db_connection_name() - if using_transactions: - # when transactions are used we want to create/update/delete object - # as transaction will be rolled back if dry_run is set - sp1 = 
savepoint(using=db_connection) - try: with atomic_if_using_transaction(using_transactions, using=db_connection): - self.before_import(dataset, using_transactions, dry_run, **kwargs) + self.before_import(dataset, **kwargs) + self._check_import_id_fields(dataset.headers) except Exception as e: - logger.debug(e, exc_info=e) - tb_info = traceback.format_exc() - result.append_base_error(self.get_error_result_class()(e, tb_info)) - if raise_errors: - raise + self.handle_import_error(result, e, raise_errors) instance_loader = self._meta.instance_loader_class(self, dataset) @@ -809,102 +892,179 @@ def import_data_inner( if collect_failed_rows: result.add_dataset_headers(dataset.headers) - for i, row in enumerate(dataset.dict, 1): - with atomic_if_using_transaction(using_transactions, using=db_connection): + for i, data_row in enumerate(dataset, 1): + row = OrderedDict(zip(dataset.headers, data_row)) + with atomic_if_using_transaction( + using_transactions and not self._meta.use_bulk, using=db_connection + ): + kwargs.update( + { + "dry_run": dry_run, + "using_transactions": using_transactions, + "row_number": i, + } + ) row_result = self.import_row( row, instance_loader, - using_transactions=using_transactions, - dry_run=dry_run, - row_number=i, - raise_errors=raise_errors, - **kwargs + **kwargs, ) + if self._meta.use_bulk: + # persist a batch of rows + # because this is a batch, any exceptions are logged and not associated + # with a specific row + if len(self.create_instances) == self._meta.batch_size: + with atomic_if_using_transaction( + using_transactions, using=db_connection + ): + self.bulk_create( + using_transactions, + dry_run, + raise_errors, + batch_size=self._meta.batch_size, + result=result, + ) + if len(self.update_instances) == self._meta.batch_size: + with atomic_if_using_transaction( + using_transactions, using=db_connection + ): + self.bulk_update( + using_transactions, + dry_run, + raise_errors, + batch_size=self._meta.batch_size, + result=result, + ) + 
if len(self.delete_instances) == self._meta.batch_size: + with atomic_if_using_transaction( + using_transactions, using=db_connection + ): + self.bulk_delete( + using_transactions, dry_run, raise_errors, result=result + ) + result.increment_row_result_total(row_result) if row_result.errors: + result.append_error_row(i, row, row_result.errors) if collect_failed_rows: result.append_failed_row(row, row_result.errors[0]) if raise_errors: - raise row_result.errors[-1].error + raise exceptions.ImportError( + row_result.errors[-1].error, number=i, row=row + ) elif row_result.validation_error: result.append_invalid_row(i, row, row_result.validation_error) if collect_failed_rows: result.append_failed_row(row, row_result.validation_error) if raise_errors: - raise row_result.validation_error - if (row_result.import_type != RowResult.IMPORT_TYPE_SKIP or - self._meta.report_skipped): + raise exceptions.ImportError( + row_result.validation_error, number=i, row=row + ) + if ( + row_result.import_type != RowResult.IMPORT_TYPE_SKIP + or self._meta.report_skipped + ): result.append_row_result(row_result) if self._meta.use_bulk: # bulk persist any instances which are still pending with atomic_if_using_transaction(using_transactions, using=db_connection): - self.bulk_create(using_transactions, dry_run, raise_errors) - self.bulk_update(using_transactions, dry_run, raise_errors) - self.bulk_delete(using_transactions, dry_run, raise_errors) + self.bulk_create( + using_transactions, dry_run, raise_errors, result=result + ) + self.bulk_update( + using_transactions, dry_run, raise_errors, result=result + ) + self.bulk_delete( + using_transactions, dry_run, raise_errors, result=result + ) try: with atomic_if_using_transaction(using_transactions, using=db_connection): - self.after_import(dataset, result, using_transactions, dry_run, **kwargs) + self.after_import(dataset, result, **kwargs) except Exception as e: - logger.debug(e, exc_info=e) - tb_info = traceback.format_exc() - 
result.append_base_error(self.get_error_result_class()(e, tb_info)) - if raise_errors: - raise - - if using_transactions: - if dry_run or \ - result.has_errors() or \ - (rollback_on_validation_errors and result.has_validation_errors()): - savepoint_rollback(sp1, using=db_connection) - else: - savepoint_commit(sp1, using=db_connection) + self.handle_import_error(result, e, raise_errors) return result + def get_import_order(self): + return self._get_ordered_field_names("import_order") + def get_export_order(self): - order = tuple(self._meta.export_order or ()) - return order + tuple(k for k in self.fields if k not in order) + return self._get_ordered_field_names("export_order") - def before_export(self, queryset, *args, **kwargs): - """ + def before_export(self, queryset, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param queryset: The queryset for export. + + :param \**kwargs: + Metadata which may be associated with the export. """ pass - def after_export(self, queryset, data, *args, **kwargs): - """ + def after_export(self, queryset, dataset, **kwargs): + r""" Override to add additional logic. Does nothing by default. + + :param queryset: The queryset for export. + + :param dataset: A ``tablib.Dataset``. + + :param \**kwargs: + Metadata which may be associated with the export. """ pass - def export_field(self, field, obj): - field_name = self.get_field_name(field) - method = getattr(self, 'dehydrate_%s' % field_name, None) - if method is not None: - return method(obj) - return field.export(obj) + def filter_export(self, queryset, **kwargs): + r""" + Override to filter an export queryset. - def get_export_fields(self): - return self.get_fields() + :param queryset: The queryset for export. + + :param \**kwargs: + Metadata which may be associated with the export. + + :returns: The filtered queryset. 
+ """ + return queryset - def export_resource(self, obj): - return [self.export_field(field, obj) for field in self.get_export_fields()] + def export_field(self, field, instance, **kwargs): + field_name = self.get_field_name(field) + dehydrate_method = field.get_dehydrate_method(field_name) - def get_export_headers(self): - headers = [ - force_str(field.column_name) for field in self.get_export_fields()] - return headers + if callable(dehydrate_method): + method = dehydrate_method + else: + method = getattr(self, dehydrate_method, None) - def get_user_visible_headers(self): - headers = [ - force_str(field.column_name) for field in self.get_user_visible_fields()] - return headers + if method is not None: + return method(instance) + return field.export(instance, **kwargs) + + def get_export_fields(self, selected_fields=None): + fields_ = selected_fields if selected_fields else self.fields + export_fields = [] + export_order = self.get_export_order() + for field_name in export_order: + if field_name in fields_: + field = self._select_field(field_name) + if field is not None: + export_fields.append(field) + return export_fields + + def export_resource(self, instance, selected_fields=None, **kwargs): + export_fields = self.get_export_fields(selected_fields) + return [self.export_field(field, instance, **kwargs) for field in export_fields] + + def get_export_headers(self, selected_fields=None): + export_fields = self.get_export_fields(selected_fields) + return [force_str(field.column_name) for field in export_fields if field] def get_user_visible_fields(self): - return self.get_fields() + return self.get_import_fields() def iter_queryset(self, queryset): if not isinstance(queryset, QuerySet): @@ -916,106 +1076,118 @@ def iter_queryset(self, queryset): if not queryset.query.order_by: # Paginator() throws a warning if there is no sorting # attached to the queryset - queryset = queryset.order_by('pk') + queryset = queryset.order_by("pk") paginator = Paginator(queryset, 
self.get_chunk_size()) for index in range(paginator.num_pages): yield from paginator.get_page(index + 1) else: yield from queryset.iterator(chunk_size=self.get_chunk_size()) - def export(self, queryset=None, *args, **kwargs): + def export(self, queryset=None, **kwargs): """ Exports a resource. - """ - self.before_export(queryset, *args, **kwargs) + :param queryset: The queryset for export (optional). + + :returns: A ``tablib.Dataset``. + """ + self.before_export(queryset, **kwargs) if queryset is None: queryset = self.get_queryset() - headers = self.get_export_headers() - data = tablib.Dataset(headers=headers) + queryset = self.filter_export(queryset, **kwargs) + export_fields = kwargs.get("export_fields", None) + headers = self.get_export_headers(selected_fields=export_fields) + dataset = tablib.Dataset(headers=headers) for obj in self.iter_queryset(queryset): - data.append(self.export_resource(obj)) + r = self.export_resource(obj, selected_fields=export_fields, **kwargs) + dataset.append(r) - self.after_export(queryset, data, *args, **kwargs) + self.after_export(queryset, dataset, **kwargs) - return data + return dataset + def _select_field(self, target_field_name): + # select field from fields based on either declared name or column name + if target_field_name in self.fields: + return self.fields[target_field_name] -class ModelDeclarativeMetaclass(DeclarativeMetaclass): + for field_name, field in self.fields.items(): + if target_field_name == field.column_name: + return field + # it should have been possible to identify the declared field + # but warn if not + warn(f"cannot identify field for export with name '{target_field_name}'") - def __new__(cls, name, bases, attrs): - new_class = super().__new__(cls, name, bases, attrs) + def _get_ordered_field_names(self, order_field): + """ + Return a list of field names, respecting any defined ordering. 
+ """ + # get any declared 'order' fields + order_fields = tuple(getattr(self._meta, order_field) or ()) + # get any defined fields + defined_fields = order_fields + tuple(getattr(self._meta, "fields") or ()) - opts = new_class._meta + order = [] + [order.append(f) for f in defined_fields if f not in order] + declared_fields = [] + for field_name, field in self.fields.items(): + if field_name not in order and field.column_name not in order: + declared_fields.append(field_name) + return tuple(order) + tuple(declared_fields) - if not opts.instance_loader_class: - opts.instance_loader_class = ModelInstanceLoader + def _is_using_transactions(self, kwargs): + return kwargs.get("using_transactions", False) - if opts.model: - model_opts = opts.model._meta - declared_fields = new_class.fields + def _is_dry_run(self, kwargs): + return kwargs.get("dry_run", False) - field_list = [] - for f in sorted(model_opts.fields + model_opts.many_to_many): - if opts.fields is not None and not f.name in opts.fields: - continue - if opts.exclude and f.name in opts.exclude: - continue - if f.name in declared_fields: - continue + def _check_import_id_fields(self, headers): + """ + Provides a safety check with a meaningful error message for cases where + the ``import_id_fields`` declaration contains a field which is not in the + dataset. For most use-cases this is an error, so we detect and raise. + There are conditions, such as 'dynamic fields' where this does not apply. + See issue 1834 for more information. 
+ """ + import_id_fields = [] + missing_fields = [] + missing_headers = [] + + if self.get_import_id_fields() == ["id"]: + # this is the default case, so ok if not present + return - field = new_class.field_from_django_field(f.name, f, - readonly=False) - field_list.append((f.name, field, )) - - new_class.fields.update(OrderedDict(field_list)) - - # add fields that follow relationships - if opts.fields is not None: - field_list = [] - for field_name in opts.fields: - if field_name in declared_fields: - continue - if field_name.find('__') == -1: - continue - - model = opts.model - attrs = field_name.split('__') - for i, attr in enumerate(attrs): - verbose_path = ".".join([opts.model.__name__] + attrs[0:i+1]) - - try: - f = model._meta.get_field(attr) - except FieldDoesNotExist as e: - logger.debug(e, exc_info=e) - raise FieldDoesNotExist( - "%s: %s has no field named '%s'" % - (verbose_path, model.__name__, attr)) - - if i < len(attrs) - 1: - # We're not at the last attribute yet, so check - # that we're looking at a relation, and move on to - # the next model. 
- if isinstance(f, ForeignObjectRel): - model = get_related_model(f) - else: - if get_related_model(f) is None: - raise KeyError( - '%s is not a relation' % verbose_path) - model = get_related_model(f) - - if isinstance(f, ForeignObjectRel): - f = f.field - - field = new_class.field_from_django_field(field_name, f, - readonly=True) - field_list.append((field_name, field)) - - new_class.fields.update(OrderedDict(field_list)) - - return new_class + for field_name in self.get_import_id_fields(): + if field_name not in self.fields: + missing_fields.append(field_name) + else: + import_id_fields.append(self.fields[field_name]) + + if missing_fields: + raise exceptions.FieldError( + _( + "The following fields are declared in 'import_id_fields' but " + "are not present in the resource fields: %s" + % ", ".join(missing_fields) + ) + ) + + for field in import_id_fields: + if not headers or field.column_name not in headers: + # escape to be safe (exception could end up in logs) + col = escape(field.column_name) + missing_headers.append(col) + + if missing_headers: + raise exceptions.FieldError( + _( + "The following fields are declared in 'import_id_fields' but " + "are not present in the file headers: %s" + % ", ".join(missing_headers) + ) + ) class ModelResource(Resource, metaclass=ModelDeclarativeMetaclass): @@ -1026,25 +1198,28 @@ class ModelResource(Resource, metaclass=ModelDeclarativeMetaclass): DEFAULT_RESOURCE_FIELD = Field WIDGETS_MAP = { - 'ManyToManyField': 'get_m2m_widget', - 'OneToOneField': 'get_fk_widget', - 'ForeignKey': 'get_fk_widget', - 'DecimalField': widgets.DecimalWidget, - 'DateTimeField': widgets.DateTimeWidget, - 'DateField': widgets.DateWidget, - 'TimeField': widgets.TimeWidget, - 'DurationField': widgets.DurationWidget, - 'FloatField': widgets.FloatWidget, - 'IntegerField': widgets.IntegerWidget, - 'PositiveIntegerField': widgets.IntegerWidget, - 'BigIntegerField': widgets.IntegerWidget, - 'PositiveSmallIntegerField': widgets.IntegerWidget, - 
'SmallIntegerField': widgets.IntegerWidget, - 'SmallAutoField': widgets.IntegerWidget, - 'AutoField': widgets.IntegerWidget, - 'BigAutoField': widgets.IntegerWidget, - 'NullBooleanField': widgets.BooleanWidget, - 'BooleanField': widgets.BooleanWidget, + "ManyToManyField": "get_m2m_widget", + "OneToOneField": "get_fk_widget", + "ForeignKey": "get_fk_widget", + "CharField": widgets.CharWidget, + "DecimalField": widgets.DecimalWidget, + "DateTimeField": widgets.DateTimeWidget, + "DateField": widgets.DateWidget, + "TimeField": widgets.TimeWidget, + "DurationField": widgets.DurationWidget, + "FloatField": widgets.FloatWidget, + "IntegerField": widgets.IntegerWidget, + "PositiveIntegerField": widgets.IntegerWidget, + "BigIntegerField": widgets.IntegerWidget, + "PositiveBigIntegerField": widgets.IntegerWidget, + "PositiveSmallIntegerField": widgets.IntegerWidget, + "SmallIntegerField": widgets.IntegerWidget, + "SmallAutoField": widgets.IntegerWidget, + "AutoField": widgets.IntegerWidget, + "BigAutoField": widgets.IntegerWidget, + "NullBooleanField": widgets.BooleanWidget, + "BooleanField": widgets.BooleanWidget, + "JSONField": widgets.JSONWidget, } @classmethod @@ -1053,17 +1228,26 @@ def get_m2m_widget(cls, field): Prepare widget for m2m field """ return functools.partial( - widgets.ManyToManyWidget, - model=get_related_model(field)) + widgets.ManyToManyWidget, model=get_related_model(field) + ) @classmethod def get_fk_widget(cls, field): """ Prepare widget for fk and o2o fields """ + + model = get_related_model(field) + + use_natural_foreign_keys = ( + has_natural_foreign_key(model) and cls._meta.use_natural_foreign_keys + ) + return functools.partial( widgets.ForeignKeyWidget, - model=get_related_model(field)) + model=model, + use_natural_foreign_keys=use_natural_foreign_keys, + ) @classmethod def widget_from_django_field(cls, f, default=widgets.Widget): @@ -1071,7 +1255,7 @@ def widget_from_django_field(cls, f, default=widgets.Widget): Returns the widget that would 
likely be associated with each Django type. - Includes mapping of Postgres Array and JSON fields. In the case that + Includes mapping of Postgres Array field. In the case that psycopg2 is not installed, we consume the error and process the field regardless. """ @@ -1085,35 +1269,44 @@ def widget_from_django_field(cls, f, default=widgets.Widget): if isinstance(result, str): result = getattr(cls, result)(f) else: + # issue 1804 + # The field class may be in a third party library as a subclass + # of a standard field class. + # iterate base classes to determine the correct widget class to use. + for base_class in f.__class__.__mro__: + if base_class.__name__ in cls.WIDGETS_MAP: + result = cls.WIDGETS_MAP[base_class.__name__] + if isinstance(result, str): + result = getattr(cls, result)(f) + break + try: from django.contrib.postgres.fields import ArrayField - try: - from django.db.models import JSONField - except ImportError: - from django.contrib.postgres.fields import JSONField except ImportError: # ImportError: No module named psycopg2.extras class ArrayField: pass - class JSONField: - pass - if isinstance(f, ArrayField): return widgets.SimpleArrayWidget - elif isinstance(f, JSONField): - return widgets.JSONWidget return result @classmethod - def widget_kwargs_for_field(self, field_name): + def widget_kwargs_for_field(cls, field_name, django_field): """ Returns widget kwargs for given field_name. 
""" - if self._meta.widgets: - return self._meta.widgets.get(field_name, {}) - return {} + widget_kwargs = {} + if cls._meta.widgets: + cls_kwargs = cls._meta.widgets.get(field_name, {}) + widget_kwargs.update(cls_kwargs) + if ( + issubclass(django_field.__class__, fields.CharField) + and django_field.blank is True + ): + widget_kwargs.update({"coerce_to_string": True, "allow_blank": True}) + return widget_kwargs @classmethod def field_from_django_field(cls, field_name, django_field, readonly): @@ -1122,10 +1315,22 @@ def field_from_django_field(cls, field_name, django_field, readonly): """ FieldWidget = cls.widget_from_django_field(django_field) - widget_kwargs = cls.widget_kwargs_for_field(field_name) + widget_kwargs = cls.widget_kwargs_for_field(field_name, django_field) + + attribute = field_name + column_name = field_name + # To solve #974 + if ( + isinstance(django_field, ForeignKey) + and "__" not in column_name + and not cls._meta.use_natural_foreign_keys + ): + attribute += "_id" + widget_kwargs["key_is_id"] = True + field = cls.DEFAULT_RESOURCE_FIELD( - attribute=field_name, - column_name=field_name, + attribute=attribute, + column_name=column_name, widget=FieldWidget(**widget_kwargs), readonly=readonly, default=django_field.default, @@ -1145,15 +1350,20 @@ def init_instance(self, row=None): """ return self._meta.model() - def after_import(self, dataset, result, using_transactions, dry_run, **kwargs): + def after_import(self, dataset, result, **kwargs): """ Reset the SQL sequences after new objects are imported """ # Adapted from django's loaddata - if not dry_run and any(r.import_type == RowResult.IMPORT_TYPE_NEW for r in result.rows): + dry_run = self._is_dry_run(kwargs) + if not dry_run and any( + r.import_type == RowResult.IMPORT_TYPE_NEW for r in result.rows + ): db_connection = self.get_db_connection_name() connection = connections[db_connection] - sequence_sql = connection.ops.sequence_reset_sql(no_style(), [self._meta.model]) + sequence_sql = 
connection.ops.sequence_reset_sql( + no_style(), [self._meta.model] + ) if sequence_sql: cursor = connection.cursor() try: @@ -1162,18 +1372,24 @@ def after_import(self, dataset, result, using_transactions, dry_run, **kwargs): finally: cursor.close() + @classmethod + def get_display_name(cls): + if hasattr(cls._meta, "name"): + return cls._meta.name + return cls.__name__ + def modelresource_factory(model, resource_class=ModelResource): """ Factory for creating ``ModelResource`` class for given Django model. """ - attrs = {'model': model} - Meta = type(str('Meta'), (object,), attrs) + attrs = {"model": model} + Meta = type("Meta", (object,), attrs) - class_name = model.__name__ + str('Resource') + class_name = model.__name__ + "Resource" class_attrs = { - 'Meta': Meta, + "Meta": Meta, } metaclass = ModelDeclarativeMetaclass diff --git a/import_export/results.py b/import_export/results.py index 9f6ac5d84..acbdec635 100644 --- a/import_export/results.py +++ b/import_export/results.py @@ -1,50 +1,158 @@ +import logging +import sys +import traceback from collections import OrderedDict from django.core.exceptions import NON_FIELD_ERRORS from django.utils.encoding import force_str +from django.utils.functional import cached_property +from django.utils.translation import gettext_lazy as _ from tablib import Dataset +logger = logging.getLogger(__name__) + class Error: - def __init__(self, error, traceback=None, row=None): + """ + Base class representing an Error arising from error during data import. + """ + + def __init__(self, error, row=None, number=None): + """ + :param error: Instance of an Exception class. + :param row: The row as a dict of fields and values (optional). + :param number: The row number (optional). 
+ """ self.error = error - self.traceback = traceback self.row = row + self.number = number + + def __repr__(self): + result = f"= (3, 10): + lines = traceback.format_exception(self.error) + else: + lines = traceback.format_exception( + None, self.error, self.error.__traceback__ + ) + return "".join(lines) class RowResult: - IMPORT_TYPE_UPDATE = 'update' - IMPORT_TYPE_NEW = 'new' - IMPORT_TYPE_DELETE = 'delete' - IMPORT_TYPE_SKIP = 'skip' - IMPORT_TYPE_ERROR = 'error' - IMPORT_TYPE_INVALID = 'invalid' - - valid_import_types = frozenset([ - IMPORT_TYPE_NEW, - IMPORT_TYPE_UPDATE, - IMPORT_TYPE_DELETE, - IMPORT_TYPE_SKIP, - ]) + """Container for values relating to a row import.""" + + IMPORT_TYPE_UPDATE = "update" + IMPORT_TYPE_NEW = "new" + IMPORT_TYPE_DELETE = "delete" + IMPORT_TYPE_SKIP = "skip" + IMPORT_TYPE_ERROR = "error" + IMPORT_TYPE_INVALID = "invalid" + + valid_import_types = frozenset( + [ + IMPORT_TYPE_NEW, + IMPORT_TYPE_UPDATE, + IMPORT_TYPE_DELETE, + IMPORT_TYPE_SKIP, + ] + ) def __init__(self): + #: An instance of :class:`~import_export.results.Error` which may have been + #: raised during import. self.errors = [] + + #: Contains any ValidationErrors which may have been raised during import. self.validation_error = None + + #: A HTML representation of the difference between the 'original' and + #: 'updated' model instance. self.diff = None + + #: A string identifier which identifies what type of import was performed. self.import_type = None - self.raw_values = {} + + #: Retain the raw values associated with each imported row. + self.row_values = {} + + #: The instance id (used in Admin UI) self.object_id = None + + #: The object representation (used in Admin UI) self.object_repr = None + #: A reference to the model instance which was created, updated or deleted. + self.instance = None + + #: A reference to the model instance before updates were applied. + #: This value is only set for updates. 
+ self.original = None + + def is_update(self): + """ + :return: ``True`` if import type is 'update', otherwise ``False``. + """ + return self.import_type == RowResult.IMPORT_TYPE_UPDATE + + def is_new(self): + """ + :return: ``True`` if import type is 'new', otherwise ``False``. + """ + return self.import_type == RowResult.IMPORT_TYPE_NEW + + def is_delete(self): + """ + :return: ``True`` if import type is 'delete', otherwise ``False``. + """ + return self.import_type == RowResult.IMPORT_TYPE_DELETE + + def is_skip(self): + """ + :return: ``True`` if import type is 'skip', otherwise ``False``. + """ + return self.import_type == RowResult.IMPORT_TYPE_SKIP + + def is_error(self): + """ + :return: ``True`` if import type is 'error', otherwise ``False``. + """ + return self.import_type == RowResult.IMPORT_TYPE_ERROR + + def is_invalid(self): + """ + :return: ``True`` if import type is 'invalid', otherwise ``False``. + """ + return self.import_type == RowResult.IMPORT_TYPE_INVALID + + def is_valid(self): + """ + :return: ``True`` if import type is not 'error' or 'invalid', otherwise + ``False``. + """ + return self.import_type in self.valid_import_types + def add_instance_info(self, instance): if instance is not None: # Add object info to RowResult (e.g. 
for LogEntry) self.object_id = getattr(instance, "pk", None) - self.object_repr = force_str(instance) + try: + self.object_repr = force_str(instance) + except Exception as e: + logger.debug(_("call to force_str() on instance failed: %s" % str(e))) class InvalidRow: - """A row that resulted in one or more ``ValidationError`` being raised during import.""" + """A row that resulted in one or more ``ValidationError`` + being raised during import.""" def __init__(self, number, validation_error, values): self.number = number @@ -59,7 +167,8 @@ def __init__(self, number, validation_error, values): def field_specific_errors(self): """Returns a dictionary of field-specific validation errors for this row.""" return { - key: value for key, value in self.error_dict.items() + key: value + for key, value in self.error_dict.items() if key != NON_FIELD_ERRORS } @@ -76,28 +185,51 @@ def error_count(self): count += len(error_list) return count + def __repr__(self): + return ( + f"<{type(self).__name__}(row={self.number}, " + f"error={self.error!r}, " + f"error_count={self.error_count})>" + ) + + +class ErrorRow: + """A row that resulted in one or more errors being raised during import.""" + + def __init__(self, number, errors): + #: The row number + self.number = number + #: A list of errors associated with the row + self.errors = errors + class Result: def __init__(self, *args, **kwargs): super().__init__() self.base_errors = [] self.diff_headers = [] - self.rows = [] # RowResults - self.invalid_rows = [] # InvalidRow + #: The rows associated with the result. + self.rows = [] + #: The collection of rows which had validation errors. + self.invalid_rows = [] + #: The collection of rows which had generic errors. + self.error_rows = [] + #: A custom Dataset containing only failed rows and associated errors. 
self.failed_dataset = Dataset() - self.totals = OrderedDict([(RowResult.IMPORT_TYPE_NEW, 0), - (RowResult.IMPORT_TYPE_UPDATE, 0), - (RowResult.IMPORT_TYPE_DELETE, 0), - (RowResult.IMPORT_TYPE_SKIP, 0), - (RowResult.IMPORT_TYPE_ERROR, 0), - (RowResult.IMPORT_TYPE_INVALID, 0)]) + self.totals = OrderedDict( + [ + (RowResult.IMPORT_TYPE_NEW, 0), + (RowResult.IMPORT_TYPE_UPDATE, 0), + (RowResult.IMPORT_TYPE_DELETE, 0), + (RowResult.IMPORT_TYPE_SKIP, 0), + (RowResult.IMPORT_TYPE_ERROR, 0), + (RowResult.IMPORT_TYPE_INVALID, 0), + ] + ) self.total_rows = 0 def valid_rows(self): - return [ - r for r in self.rows - if r.import_type in RowResult.valid_import_types - ] + return [r for r in self.rows if r.import_type in RowResult.valid_import_types] def append_row_result(self, row_result): self.rows.append(row_result) @@ -106,6 +238,7 @@ def append_base_error(self, error): self.base_errors.append(error) def add_dataset_headers(self, headers): + headers = [] if not headers else headers self.failed_dataset.headers = headers + ["Error"] def append_failed_row(self, row, error): @@ -124,13 +257,15 @@ def append_invalid_row(self, number, row, validation_error): InvalidRow(number=number, validation_error=validation_error, values=values) ) + def append_error_row(self, number, row, errors): + self.error_rows.append(ErrorRow(number=number, errors=errors)) + def increment_row_result_total(self, row_result): if row_result.import_type: self.totals[row_result.import_type] += 1 def row_errors(self): - return [(i + 1, row.errors) - for i, row in enumerate(self.rows) if row.errors] + return [(i + 1, row.errors) for i, row in enumerate(self.rows) if row.errors] def has_errors(self): """Returns a boolean indicating whether the import process resulted in diff --git a/import_export/static/import_export/action_formats.js b/import_export/static/import_export/action_formats.js deleted file mode 100644 index 9f0fe571c..000000000 --- a/import_export/static/import_export/action_formats.js +++ /dev/null 
@@ -1,22 +0,0 @@ -(function($) { - $(document).ready(function() { - var $actionsSelect, $formatsElement; - if ($('body').hasClass('grp-change-list')) { - // using grappelli - $actionsSelect = $('#grp-changelist-form select[name="action"]'); - $formatsElement = $('#grp-changelist-form select[name="file_format"]'); - } else { - // using default admin - $actionsSelect = $('#changelist-form select[name="action"]'); - $formatsElement = $('#changelist-form select[name="file_format"]').parent(); - } - $actionsSelect.change(function() { - if ($(this).val() === 'export_admin_action') { - $formatsElement.show(); - } else { - $formatsElement.hide(); - } - }); - $actionsSelect.change(); - }); -})(django.jQuery); diff --git a/import_export/static/import_export/export.css b/import_export/static/import_export/export.css new file mode 100644 index 000000000..4a42e7894 --- /dev/null +++ b/import_export/static/import_export/export.css @@ -0,0 +1,7 @@ +.selectable-field-export-row { + padding-left: 10px; +} + +.selectable-field-export-row > label{ + padding-left: 5px; +} diff --git a/import_export/static/import_export/export_selectable_fields.js b/import_export/static/import_export/export_selectable_fields.js new file mode 100644 index 000000000..551a6720c --- /dev/null +++ b/import_export/static/import_export/export_selectable_fields.js @@ -0,0 +1,45 @@ +function hideUnselectedResourceFields(selectedResourceIndex) { + const fields = document.querySelectorAll("[resource-index]"); + + fields.forEach((field) => { + if (field.getAttribute("resource-index") !== selectedResourceIndex.toString()) { + // field is wrapped by div, change visibility on wrapper + field.style.display = "none"; + } + }); +} + +function showSelectedResourceFields(resourceIndex) { + const fields = document.querySelectorAll(`[resource-index="${resourceIndex}"]`); + + fields.forEach((field) => { + // field is wrapped by div, change visibility on wrapper + field.style.display = "block"; + }); +} + +function 
onResourceSelected(e) { + const resourceIndex = e.target.value; + + showSelectedResourceFields(resourceIndex); + + hideUnselectedResourceFields(resourceIndex); +} + +document.addEventListener("DOMContentLoaded", () => { + const resourceSelector = document.querySelector("#id_resource"); + + if (!resourceSelector) { + console.error("resource select input not found"); + return; + } + + // If selector is actually select input, get selected option. + // else selected resource index is 0 + const selectedResourceIndex = resourceSelector.tagName === "SELECT" ? resourceSelector.value : 0; + + resourceSelector.addEventListener("input", onResourceSelected); + + // initially hide unselected resource fields + hideUnselectedResourceFields(selectedResourceIndex); +}); diff --git a/import_export/static/import_export/guess_format.js b/import_export/static/import_export/guess_format.js new file mode 100644 index 000000000..cd1635fc4 --- /dev/null +++ b/import_export/static/import_export/guess_format.js @@ -0,0 +1,21 @@ +(function($) { + $().ready(function () { + $('input.guess_format[type="file"]').change(function () { + var files = this.files; + var dropdowns = $(this.form).find('select.guess_format'); + if(files.length > 0) { + var extension = files[0].name.split('.').pop().trim().toLowerCase(); + for(var i = 0; i < dropdowns.length; i++) { + var dropdown = dropdowns[i]; + dropdown.selectedIndex = 0; + for(var j = 0; j < dropdown.options.length; j++) { + if(extension === dropdown.options[j].text.trim().toLowerCase()) { + dropdown.selectedIndex = j; + break; + } + } + } + } + }); + }); +})(django.jQuery); diff --git a/import_export/static/import_export/import.css b/import_export/static/import_export/import.css index bb20ba2a1..d457a7a16 100644 --- a/import_export/static/import_export/import.css +++ b/import_export/static/import_export/import.css @@ -26,22 +26,34 @@ z-index: 2; } -table.import-preview tr.skip { +html[data-theme="light"] .validation-error-container { + 
background-color: #ffc1c1; +} + +table.import-preview tr.skip, html[data-theme="light"] table.import-preview tr.skip { background-color: #d2d2d2; } -table.import-preview tr.new { +table.import-preview tr.new, html[data-theme="light"] table.import-preview tr.new { background-color: #bdd8b2; } -table.import-preview tr.delete { +table.import-preview tr.delete, html[data-theme="light"] table.import-preview tr.delete { background-color: #f9bebf; } -table.import-preview tr.update { +table.import-preview tr.update, html[data-theme="light"] table.import-preview tr.update { background-color: #fdfdcf; } +table.import-preview td ins, html[data-theme="light"] table.import-preview td ins { + background-color: #e6ffe6 !important; +} + +html[data-theme="light"] table.import-preview td del { + background-color: #ffe6e6 !important; +} + .import-preview td:hover .validation-error-count { z-index: 3; } @@ -79,3 +91,69 @@ table.import-preview tr.update { font-weight: bold; font-size: 0.85em; } + +@media (prefers-color-scheme: dark) { + table.import-preview tr.skip { + background-color: #2d2d2d; + } + + table.import-preview tr.new { + background-color: #42274d; + } + + table.import-preview tr.delete { + background-color: #064140; + } + + table.import-preview tr.update { + background-color: #020230; + } + + .validation-error-container { + background-color: #003e3e; + } + + /* + these declarations are necessary to forcibly override the + formatting applied by the diff-match-patch python library + */ + table.import-preview td ins { + background-color: #190019 !important; + } + + table.import-preview td del { + background-color: #001919 !important; + } +} + +html[data-theme="dark"] table.import-preview tr.skip { + background-color: #2d2d2d; +} + +html[data-theme="dark"] table.import-preview tr.new { + background-color: #42274d; +} + +html[data-theme="dark"] table.import-preview tr.delete { + background-color: #064140; +} + +html[data-theme="dark"] table.import-preview tr.update { + 
background-color: #020230; +} + +html[data-theme="dark"] .validation-error-container { + background-color: #003e3e; +} + +/* +these declarations are necessary to forcibly override the +formatting applied by the diff-match-patch python library + */ +html[data-theme="dark"] table.import-preview td ins { + background-color: #190019 !important; +} + +html[data-theme="dark"] table.import-preview td del { + background-color: #001919 !important; +} diff --git a/import_export/templates/admin/import_export/base.html b/import_export/templates/admin/import_export/base.html index 0aadf18c0..8e19e3e78 100644 --- a/import_export/templates/admin/import_export/base.html +++ b/import_export/templates/admin/import_export/base.html @@ -8,7 +8,7 @@ {% if not is_popup %} {% block breadcrumbs %} - {% endfor %} - +
+ {{ form.non_field_errors }} +
-
- -
+
+ +
{% endblock %} diff --git a/import_export/templates/admin/import_export/import.html b/import_export/templates/admin/import_export/import.html index f791c790d..d8fd9b397 100644 --- a/import_export/templates/admin/import_export/import.html +++ b/import_export/templates/admin/import_export/import.html @@ -6,59 +6,82 @@ {% block extrastyle %}{{ block.super }}{% endblock %} +{% block extrahead %}{{ block.super }} + + {% if confirm_form %} + {{ confirm_form.media }} + {% else %} + {{ form.media }} + {% endif %} +{% endblock %} + {% block breadcrumbs_last %} -{% trans "Import" %} +{% translate "Import" %} {% endblock %} {% block content %} {% if confirm_form %} + {% block confirm_import_form %}
{% csrf_token %} {{ confirm_form.as_p }}

- {% trans "Below is a preview of data to be imported. If you are satisfied with the results, click 'Confirm import'" %} + {% translate "Below is a preview of data to be imported. If you are satisfied with the results, click 'Confirm import'" %}

- +
+ {% endblock %} {% else %} + {% block import_form %}
{% csrf_token %} -

- {% trans "This importer will import the following fields: " %} - {{ fields|join:", " }} -

- -
- {% for field in form %} -
- {{ field.errors }} + {% include "admin/import_export/resource_fields_list.html" with import_or_export="import" %} + {% block import_form_additional_info %}{% endblock %} - {{ field.label_tag }} + {% block form_detail %} +
+ {% for field in form.visible_fields %} +
+ {{ field.errors }} - {{ field }} + {{ field.label_tag }} - {% if field.field.help_text %} -

{{ field.field.help_text|safe }}

- {% endif %} -
- {% endfor %} -
+ {% if field.field.widget.attrs.readonly %} + {{ field.field.value }} + {{ field.as_hidden }} + {% else %} + {{ field }} + {% endif %} -
- -
+ {% if field.field.help_text %} +

{{ field.field.help_text|safe }}

+ {% endif %} +
+ {% endfor %} + {% for field in form.hidden_fields %} + {{ field }} + {% endfor %} +
+ {% endblock %} + + {% block form_submit_button %} +
+ +
+ {% endblock %}
+ {% endblock %} {% endif %} {% if result %} {% if result.has_errors %} - -

{% trans "Errors" %}

+ {% block errors %} +

{% translate "Errors" %}

    {% for error in result.base_errors %}
  • @@ -66,28 +89,40 @@

    {% trans "Errors" %}

    {{ error.traceback|linebreaks }}
  • {% endfor %} + {% block import_error_list %} {% for line, errors in result.row_errors %} {% for error in errors %} -
  • - {% trans "Line number" %}: {{ line }} - {{ error.error }} -
    {{ error.row.values|join:", " }}
    -
    {{ error.traceback|linebreaks }}
    + {% block import_error_list_item %} +
  • + {% if "message" in import_error_display %} +
    {% translate "Line number" %}: {{ line }} - {{ error.error }}
    + {% endif %} + {% if "row" in import_error_display %} +
    {{ error.row.values|join:", " }}
    + {% endif %} + {% if "traceback" in import_error_display %} +
    {{ error.traceback|linebreaks }}
    + {% endif %}
  • + {% endblock %} {% endfor %} {% endfor %} + {% endblock %}
+ {% endblock %} {% elif result.has_validation_errors %} -

{% trans "Some rows failed to validate" %}

- -

{% trans "Please correct these errors in your data where possible, then reupload it using the form above." %}

- + {% block validation_errors %} +

{% translate "Some rows failed to validate" %}

+ +

{% translate "Please correct these errors in your data where possible, then reupload it using the form above." %}

+ - - + + {% for field in result.diff_headers %} {% endfor %} @@ -98,7 +133,7 @@

{% trans "Some rows failed to validate" %}

{% trans "Row" %}{% trans "Errors" %}{% translate "Row" %}{% translate "Errors" %}{{ field }}
{{ row.number }} - {{ row.error_count }} + {{ row.error_count }}
    {% for field_name, error_list in row.field_specific_errors.items %} @@ -113,7 +148,7 @@

    {% trans "Some rows failed to validate" %}

    {% endfor %} {% if row.non_field_specific_errors %}
  • - {% trans "Non field specific" %} + {% translate "Non field specific" %}
      {% for error in row.non_field_specific_errors %}
    • {{ error }}
    • @@ -131,10 +166,12 @@

      {% trans "Some rows failed to validate" %}

      {% endfor %}
- + {% endblock %} + {% else %} -

{% trans "Preview" %}

+ {% block preview %} +

{% translate "Preview" %}

@@ -149,13 +186,13 @@

{% trans "Preview" %}

{% for field in row.diff %} @@ -164,6 +201,7 @@

{% trans "Preview" %}

{% endfor %}
{% if row.import_type == 'new' %} - {% trans "New" %} + {% translate "New" %} {% elif row.import_type == 'skip' %} - {% trans "Skipped" %} + {% translate "Skipped" %} {% elif row.import_type == 'delete' %} - {% trans "Delete" %} + {% translate "Delete" %} {% elif row.import_type == 'update' %} - {% trans "Update" %} + {% translate "Update" %} {% endif %}
+ {% endblock %} {% endif %} diff --git a/import_export/templates/admin/import_export/resource_fields_list.html b/import_export/templates/admin/import_export/resource_fields_list.html new file mode 100644 index 000000000..db28d76f1 --- /dev/null +++ b/import_export/templates/admin/import_export/resource_fields_list.html @@ -0,0 +1,21 @@ +{% load i18n %} +{% block fields_help %} +

+ {% if import_or_export == "export" %} + {% translate "This exporter will export the following fields: " %} + {% elif import_or_export == "import" %} + {% translate "This importer will import the following fields: " %} + {% endif %} + + {% if fields_list|length <= 1 %} + {{ fields_list.0.1|join:", " }} + {% else %} +

+ {% for resource, fields in fields_list %} +
{{ resource }}
+
{{ fields|join:", " }}
+ {% endfor %} +
+ {% endif %} +

+{% endblock %} diff --git a/import_export/tmp_storages.py b/import_export/tmp_storages.py index e732218f8..dfbcb5f69 100644 --- a/import_export/tmp_storages.py +++ b/import_export/tmp_storages.py @@ -4,18 +4,18 @@ from django.core.cache import cache from django.core.files.base import ContentFile -from django.core.files.storage import default_storage class BaseStorage: + def __init__(self, **kwargs): + self.name = kwargs.get("name", None) + self.read_mode = kwargs.get("read_mode", "r") + self.encoding = kwargs.get("encoding", None) - def __init__(self, name=None): - self.name = name - - def save(self, data, mode='w'): + def save(self, data): raise NotImplementedError - def read(self, read_mode='r'): + def read(self): raise NotImplementedError def remove(self): @@ -23,69 +23,82 @@ def remove(self): class TempFolderStorage(BaseStorage): - - def open(self, mode='r'): - if self.name: - return open(self.get_full_path(), mode) - else: - tmp_file = tempfile.NamedTemporaryFile(delete=False) - self.name = tmp_file.name - return tmp_file - - def save(self, data, mode='w'): - with self.open(mode=mode) as file: + def save(self, data): + with self._open(mode="w") as file: file.write(data) - def read(self, mode='r'): - with self.open(mode=mode) as file: + def read(self): + with self._open(mode=self.read_mode) as file: return file.read() def remove(self): os.remove(self.get_full_path()) def get_full_path(self): - return os.path.join( - tempfile.gettempdir(), - self.name - ) + return os.path.join(tempfile.gettempdir(), self.name) + + def _open(self, mode="r"): + if self.name: + return open(self.get_full_path(), mode, encoding=self.encoding) + else: + tmp_file = tempfile.NamedTemporaryFile(delete=False) + self.name = tmp_file.name + return tmp_file class CacheStorage(BaseStorage): """ By default memcache maximum size per key is 1MB, be careful with large files. 
""" + CACHE_LIFETIME = 86400 - CACHE_PREFIX = 'django-import-export-' + CACHE_PREFIX = "django-import-export-" - def save(self, data, mode=None): + def save(self, data): if not self.name: self.name = uuid4().hex cache.set(self.CACHE_PREFIX + self.name, data, self.CACHE_LIFETIME) - def read(self, read_mode='r'): + def read(self): return cache.get(self.CACHE_PREFIX + self.name) def remove(self): - cache.delete(self.name) + cache.delete(self.CACHE_PREFIX + self.name) class MediaStorage(BaseStorage): - MEDIA_FOLDER = 'django-import-export' + _storage = None + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._configure_storage() + self.MEDIA_FOLDER = kwargs.get("MEDIA_FOLDER", "django-import-export") - def save(self, data, mode=None): + # issue 1589 - Ensure that for MediaStorage, we read in binary mode + kwargs.update({"read_mode": "rb"}) + super().__init__(**kwargs) + + def _configure_storage(self): + from django.core.files.storage import StorageHandler + + sh = StorageHandler() + self._storage = ( + sh["import_export"] if "import_export" in sh.backends else sh["default"] + ) + + def save(self, data): if not self.name: self.name = uuid4().hex - default_storage.save(self.get_full_path(), ContentFile(data)) + self._storage.save(self.get_full_path(), ContentFile(data)) - def read(self, read_mode='rb'): - with default_storage.open(self.get_full_path(), mode=read_mode) as f: + def read(self): + with self._storage.open(self.get_full_path(), mode=self.read_mode) as f: return f.read() def remove(self): - default_storage.delete(self.get_full_path()) + self._storage.delete(self.get_full_path()) def get_full_path(self): - return os.path.join( - self.MEDIA_FOLDER, - self.name - ) + if self.MEDIA_FOLDER is not None: + return os.path.join(self.MEDIA_FOLDER, self.name) + return self.name diff --git a/import_export/utils.py b/import_export/utils.py index f7f019791..1a9760443 100644 --- a/import_export/utils.py +++ b/import_export/utils.py @@ -11,6 +11,7 @@ class 
atomic_if_using_transaction: return something() return something() """ + def __init__(self, using_transactions, using): self.using_transactions = using_transactions if using_transactions: @@ -23,3 +24,8 @@ def __enter__(self): def __exit__(self, *args): if self.using_transactions: self.context_manager.__exit__(*args) + + +def get_related_model(field): + if hasattr(field, "related_model"): + return field.related_model diff --git a/import_export/widgets.py b/import_export/widgets.py index 04662e78b..5e0c0469f 100644 --- a/import_export/widgets.py +++ b/import_export/widgets.py @@ -1,6 +1,9 @@ import json -from datetime import date, datetime, time +import logging +import numbers +from datetime import date, datetime, time, timedelta from decimal import Decimal +from warnings import warn import django from django.conf import settings @@ -8,52 +11,105 @@ from django.utils import timezone from django.utils.dateparse import parse_duration from django.utils.encoding import force_str, smart_str +from django.utils.formats import number_format, sanitize_separators +from django.utils.translation import gettext_lazy as _ + +from import_export.exceptions import WidgetError + +logger = logging.getLogger(__name__) def format_datetime(value, datetime_format): - # conditional logic to handle correct formatting of dates + # handle correct formatting of dates # see https://code.djangoproject.com/ticket/32738 - if django.VERSION[0] >= 4: - format = django.utils.formats.sanitize_strftime_format(datetime_format) - return value.strftime(format) - else: - return django.utils.datetime_safe.new_datetime(value).strftime(datetime_format) + format_ = django.utils.formats.sanitize_strftime_format(datetime_format) + return value.strftime(format_) + + +class _ParseDateTimeMixin: + """Internal Mixin for shared logic with date and datetime conversions.""" + + def __init__( + self, + format=None, + input_formats=None, + default_format="%Y-%m-%d", + coerce_to_string=True, + ): + 
super().__init__(coerce_to_string=coerce_to_string) + self.formats = (format,) if format else (input_formats or (default_format,)) + + def _parse_value(self, value, value_type): + """Attempt to parse the value using the provided formats. + Raise ValueError if parsing fails.""" + if not value: + return None + if isinstance(value, value_type): + return value + + for format_ in self.formats: + try: + parsed_date = datetime.strptime(value, format_) + if value_type is date: + return parsed_date.date() + if value_type is time: + return parsed_date.time() + return parsed_date + except (ValueError, TypeError) as e: + logger.debug(str(e)) + raise ValueError("Value could not be parsed using defined formats.") class Widget: """ - A Widget takes care of converting between import and export representations. - - This is achieved by the two methods, - :meth:`~import_export.widgets.Widget.clean` and - :meth:`~import_export.widgets.Widget.render`. + A Widget handles converting between import and export representations. """ - def clean(self, value, row=None, *args, **kwargs): + + def __init__(self, coerce_to_string=True): """ - Returns an appropriate Python object for an imported value. + :param coerce_to_string: If True, :meth:`~import_export.widgets.Widget.render` + will return a string representation of the value, otherwise the value is + returned. + """ + self.coerce_to_string = coerce_to_string - For example, if you import a value from a spreadsheet, - :meth:`~import_export.widgets.Widget.clean` handles conversion - of this value into the corresponding Python object. + def clean(self, value, row=None, **kwargs): + """ + Returns an appropriate python object for an imported value. + For example, a date string will be converted to a python datetime instance. - Numbers or dates can be *cleaned* to their respective data types and - don't have to be imported as Strings. + :param value: The value to be converted to a native type. + :param row: A dict containing row key/value pairs. 
+ :param **kwargs: Optional kwargs. """ return value - def render(self, value, obj=None): + def render(self, value, obj=None, **kwargs): """ - Returns an export representation of a Python value. + Returns an export representation of a python value. - For example, if you have an object you want to export, - :meth:`~import_export.widgets.Widget.render` takes care of converting - the object's field to a value that can be written to a spreadsheet. + :param value: The python value to be rendered. + :param obj: The model instance from which the value is taken. + This parameter is deprecated and will be removed in a future release. + + :return: By default, this value will be a string, with ``None`` values returned + as empty strings. """ - return force_str(value) + return force_str(value) if value is not None else "" + + def _obj_deprecation_warning(self, obj): + if obj is not None: + warn( + "The 'obj' parameter is deprecated and will be removed " + "in a future release", + DeprecationWarning, + stacklevel=2, + ) class NumberWidget(Widget): """ + Widget for converting numeric fields. """ def is_empty(self, value): @@ -62,19 +118,26 @@ def is_empty(self, value): # 0 is not empty return value is None or value == "" - def render(self, value, obj=None): + def render(self, value, obj=None, **kwargs): + self._obj_deprecation_warning(obj) + if self.coerce_to_string and not kwargs.get("force_native_type"): + return ( + "" + if value is None or not isinstance(value, numbers.Number) + else "" + number_format(value) + ) return value class FloatWidget(NumberWidget): """ - Widget for converting floats fields. + Widget for converting float fields. """ - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): if self.is_empty(value): return None - return float(value) + return float(sanitize_separators(value)) class IntegerWidget(NumberWidget): @@ -82,10 +145,10 @@ class IntegerWidget(NumberWidget): Widget for converting integer fields. 
""" - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): if self.is_empty(value): return None - return int(Decimal(value)) + return int(Decimal(sanitize_separators(value))) class DecimalWidget(NumberWidget): @@ -93,19 +156,37 @@ class DecimalWidget(NumberWidget): Widget for converting decimal fields. """ - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): if self.is_empty(value): return None - return Decimal(force_str(value)) + return Decimal(force_str(sanitize_separators(value))) class CharWidget(Widget): """ Widget for converting text fields. + + :param allow_blank: If True, then :meth:`~import_export.widgets.Widget.clean` + will return null values as empty strings, otherwise as ``None``. """ - def render(self, value, obj=None): - return force_str(value) + def __init__(self, coerce_to_string=True, allow_blank=True): + """ """ + self.allow_blank = allow_blank + super().__init__(coerce_to_string) + + def clean(self, value, row=None, **kwargs): + val = super().clean(value, row, **kwargs) + if val is None: + return "" if self.allow_blank is True else None + return force_str(val) + + def render(self, value, obj=None, **kwargs): + # FIXME - how are nulls exported to XLSX + self._obj_deprecation_warning(obj) + if self.coerce_to_string: + return "" if value is None else force_str(value) + return value class BooleanWidget(Widget): @@ -129,145 +210,141 @@ class BooleanWidget(Widget): class BooleanExample(resources.ModelResource): warn = fields.Field(widget=widgets.BooleanWidget()) - def before_import_row(self, row, row_number=None, **kwargs): + def before_import_row(self, row, **kwargs): if "warn" in row.keys(): # munge "warn" to "True" if row["warn"] in ["warn", "WARN"]: row["warn"] = True - return super().before_import_row(row, row_number, **kwargs) + return super().before_import_row(row, **kwargs) """ + TRUE_VALUES = ["1", 1, True, "true", "TRUE", "True"] FALSE_VALUES = ["0", 0, 
False, "false", "FALSE", "False"] NULL_VALUES = ["", None, "null", "NULL", "none", "NONE", "None"] - def render(self, value, obj=None): - """ - On export, ``True`` is represented as ``1``, ``False`` as ``0``, and - ``None``/NULL as a empty string. - - Note that these values are also used on the import confirmation view. - """ - if value in self.NULL_VALUES: - return "" - return self.TRUE_VALUES[0] if value else self.FALSE_VALUES[0] + def __init__(self, coerce_to_string=True): + """ """ + super().__init__(coerce_to_string) - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): if value in self.NULL_VALUES: return None return True if value in self.TRUE_VALUES else False + def render(self, value, obj=None, **kwargs): + """ + :return: ``True`` is represented as ``1``, ``False`` as ``0``, and + ``None``/NULL as an empty string. + + If ``coerce_to_string`` is ``False``, the python Boolean type is + returned (may be ``None``). + """ + self._obj_deprecation_warning(obj) + if self.coerce_to_string and not kwargs.get("force_native_type"): + if value in self.NULL_VALUES or not type(value) is bool: + return "" + return self.TRUE_VALUES[0] if value else self.FALSE_VALUES[0] + return value + -class DateWidget(Widget): +class DateWidget(_ParseDateTimeMixin, Widget): """ - Widget for converting date fields. + Widget for converting date fields to Python date instances. - Takes optional ``format`` parameter. + Takes optional ``format`` parameter. If none is set, either + ``settings.DATE_INPUT_FORMATS`` or ``"%Y-%m-%d"`` is used. 
""" - def __init__(self, format=None): - if format is None: - if not settings.DATE_INPUT_FORMATS: - formats = ("%Y-%m-%d",) - else: - formats = settings.DATE_INPUT_FORMATS - else: - formats = (format,) - self.formats = formats + def __init__(self, format=None, coerce_to_string=True): + super().__init__( + format, settings.DATE_INPUT_FORMATS, "%Y-%m-%d", coerce_to_string + ) - def clean(self, value, row=None, *args, **kwargs): - if not value: - return None - if isinstance(value, date): - return value - for format in self.formats: - try: - return datetime.strptime(value, format).date() - except (ValueError, TypeError): - continue - raise ValueError("Enter a valid date.") + def clean(self, value, row=None, **kwargs): + """ + :returns: A python date instance. + :raises: ValueError if the value cannot be parsed using defined formats. + """ + return self._parse_value(value, date) - def render(self, value, obj=None): - if not value: + def render(self, value, obj=None, **kwargs): + self._obj_deprecation_warning(obj) + if self.coerce_to_string is False or kwargs.get("force_native_type"): + return value + if not value or not isinstance(value, date): return "" return format_datetime(value, self.formats[0]) -class DateTimeWidget(Widget): +class DateTimeWidget(_ParseDateTimeMixin, Widget): """ - Widget for converting date fields. + Widget for converting datetime fields to Python datetime instances. Takes optional ``format`` parameter. If none is set, either ``settings.DATETIME_INPUT_FORMATS`` or ``"%Y-%m-%d %H:%M:%S"`` is used. 
""" - def __init__(self, format=None): - if format is None: - if not settings.DATETIME_INPUT_FORMATS: - formats = ("%Y-%m-%d %H:%M:%S",) - else: - formats = settings.DATETIME_INPUT_FORMATS - else: - formats = (format,) - self.formats = formats + def __init__(self, format=None, coerce_to_string=True): + super().__init__( + format, + settings.DATETIME_INPUT_FORMATS, + "%Y-%m-%d %H:%M:%S", + coerce_to_string, + ) - def clean(self, value, row=None, *args, **kwargs): - if not value: + def clean(self, value, row=None, **kwargs): + """ + :returns: A python datetime instance. + :raises: ValueError if the value cannot be parsed using defined formats. + """ + dt = self._parse_value(value, datetime) + if dt is None: return None - if isinstance(value, datetime): - return value - for format in self.formats: - try: - dt = datetime.strptime(value, format) - if settings.USE_TZ: - # make datetime timezone aware so we don't compare - # naive datetime to an aware one - dt = timezone.make_aware(dt, - timezone.get_default_timezone()) - return dt - except (ValueError, TypeError): - continue - raise ValueError("Enter a valid date/time.") - - def render(self, value, obj=None): - if not value: + if settings.USE_TZ and timezone.is_naive(dt): + return timezone.make_aware(dt) + return dt + + def render(self, value, obj=None, **kwargs): + self._obj_deprecation_warning(obj) + if not value or not isinstance(value, datetime): return "" if settings.USE_TZ: value = timezone.localtime(value) + + force_native_type = kwargs.get("force_native_type") + if self.coerce_to_string is False or force_native_type: + # binary formats such as xlsx must not have tz set + return value.replace(tzinfo=None) if force_native_type else value + return format_datetime(value, self.formats[0]) -class TimeWidget(Widget): +class TimeWidget(_ParseDateTimeMixin, Widget): """ Widget for converting time fields. - Takes optional ``format`` parameter. + Takes optional ``format`` parameter. 
If none is set, either + ``settings.DATETIME_INPUT_FORMATS`` or ``"%H:%M:%S"`` is used. """ - def __init__(self, format=None): - if format is None: - if not settings.TIME_INPUT_FORMATS: - formats = ("%H:%M:%S",) - else: - formats = settings.TIME_INPUT_FORMATS - else: - formats = (format,) - self.formats = formats + def __init__(self, format=None, coerce_to_string=True): + super().__init__( + format, settings.TIME_INPUT_FORMATS, "%H:%M:%S", coerce_to_string + ) - def clean(self, value, row=None, *args, **kwargs): - if not value: - return None - if isinstance(value, time): - return value - for format in self.formats: - try: - return datetime.strptime(value, format).time() - except (ValueError, TypeError): - continue - raise ValueError("Enter a valid time.") + def clean(self, value, row=None, **kwargs): + """ + :returns: A python time instance. + :raises: ValueError if the value cannot be parsed using defined formats. + """ + return self._parse_value(value, time) - def render(self, value, obj=None): - if not value: + def render(self, value, obj=None, **kwargs): + self._obj_deprecation_warning(obj) + if self.coerce_to_string is False or kwargs.get("force_native_type"): + return value + if not value or not isinstance(value, time): return "" return value.strftime(self.formats[0]) @@ -277,17 +354,25 @@ class DurationWidget(Widget): Widget for converting time duration fields. """ - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): + """ + :returns: A python duration instance. + :raises: ValueError if the value cannot be parsed. 
+ """ if not value: return None try: return parse_duration(value) - except (ValueError, TypeError): - raise ValueError("Enter a valid duration.") + except (ValueError, TypeError) as e: + logger.debug(str(e)) + raise ValueError(_("Value could not be parsed.")) - def render(self, value, obj=None): - if value is None: + def render(self, value, obj=None, **kwargs): + self._obj_deprecation_warning(obj) + if self.coerce_to_string is False or kwargs.get("force_native_type"): + return value + if value is None or not type(value) is timedelta: return "" return str(value) @@ -299,48 +384,67 @@ class SimpleArrayWidget(Widget): :param separator: Defaults to ``','`` """ - def __init__(self, separator=None): + def __init__(self, separator=None, coerce_to_string=True): if separator is None: - separator = ',' + separator = "," self.separator = separator - super().__init__() + super().__init__(coerce_to_string=coerce_to_string) - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): return value.split(self.separator) if value else [] - def render(self, value, obj=None): + def render(self, value, obj=None, **kwargs): + """ + :return: A string with values separated by ``separator``. + If ``coerce_to_string`` is ``False``, the native array will be returned. + If ``value`` is None, None will be returned if ``coerce_to_string`` + is ``False``, otherwise an empty string will be returned. + """ + self._obj_deprecation_warning(obj) + if value is None: + return "" if self.coerce_to_string is True else None + if not self.coerce_to_string: + return value return self.separator.join(str(v) for v in value) class JSONWidget(Widget): """ - Widget for a JSON object (especially required for jsonb fields in PostgreSQL database.) + Widget for a JSON object + (especially required for jsonb fields in PostgreSQL database.) :param value: Defaults to JSON format. 
The widget covers two cases: Proper JSON string with double quotes, else it tries to use single quotes and then convert it to proper JSON. """ - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): val = super().clean(value) if val: try: return json.loads(val) except json.decoder.JSONDecodeError: - return json.loads(val.replace("'", "\"")) + return json.loads(val.replace("'", '"')) - def render(self, value, obj=None): + def render(self, value, obj=None, **kwargs): + """ + :return: A JSON formatted string derived from ``value``. + ``coerce_to_string`` has no effect on the return value. + """ + self._obj_deprecation_warning(obj) if value: return json.dumps(value) + return None class ForeignKeyWidget(Widget): """ Widget for a ``ForeignKey`` field which looks up a related model using - "natural keys" in both export and import. + either the PK or a user specified field that uniquely identifies the + instance in both export and import. The lookup field defaults to using the primary key (``pk``) as lookup - criterion but can be customised to use any field on the related model. + criterion but can be customized to use any field on the related model. Unlike specifying a related field in your resource like so… @@ -370,12 +474,29 @@ class Meta: fields = ('author',) :param model: The Model the ForeignKey refers to (required). - :param field: A field on the related model used for looking up a particular object. + :param field: A field on the related model used for looking up a particular + object. 
+ :param use_natural_foreign_keys: Use natural key functions to identify + related object, default to False """ - def __init__(self, model, field='pk', *args, **kwargs): + + def __init__( + self, + model, + field="pk", + use_natural_foreign_keys=False, + key_is_id=False, + **kwargs, + ): self.model = model self.field = field - super().__init__(*args, **kwargs) + self.key_is_id = key_is_id + self.use_natural_foreign_keys = use_natural_foreign_keys + if use_natural_foreign_keys is True and key_is_id is True: + raise WidgetError( + _("use_natural_foreign_keys and key_is_id cannot both be True") + ) + super().__init__(**kwargs) def get_queryset(self, value, row, *args, **kwargs): """ @@ -384,15 +505,19 @@ def get_queryset(self, value, row, *args, **kwargs): Overwrite this method if you want to limit the pool of objects from which the related object is retrieved. - :param value: The field's value in the datasource. - :param row: The datasource's current row. + :param value: The field's value in the dataset. + :param row: The dataset's current row. + :param \\*args: + Optional args. + :param \\**kwargs: + Optional kwargs. As an example; if you'd like to have ForeignKeyWidget look up a Person by their pre- **and** lastname column, you could subclass the widget like so:: class FullNameForeignKeyWidget(ForeignKeyWidget): - def get_queryset(self, value, row): + def get_queryset(self, value, row, *args, **kwargs): return self.model.objects.filter( first_name__iexact=row["first_name"], last_name__iexact=row["last_name"] @@ -400,21 +525,66 @@ def get_queryset(self, value, row): """ return self.model.objects.all() - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): + """ + :return: a single Foreign Key instance derived from the args. + ``None`` can be returned if the value passed is a null value. + + :param value: The field's value in the dataset. + :param row: The dataset's current row. + :param \\**kwargs: + Optional kwargs. 
+ :raises: ``ObjectDoesNotExist`` if no valid instance can be found. + """ val = super().clean(value) if val: - return self.get_queryset(value, row, *args, **kwargs).get(**{self.field: val}) + if self.use_natural_foreign_keys: + # natural keys will always be a tuple, which ends up as a json list. + value = json.loads(value) + return self.model.objects.get_by_natural_key(*value) + else: + lookup_kwargs = self.get_lookup_kwargs(value, row, **kwargs) + obj = self.get_queryset(value, row, **kwargs).get(**lookup_kwargs) + if self.key_is_id: + return obj.pk + return obj else: return None - def render(self, value, obj=None): + def get_lookup_kwargs(self, value, row, **kwargs): + """ + :return: the key value pairs used to identify a model instance. + Override this to customize instance lookup. + + :param value: The field's value in the dataset. + :param row: The dataset's current row. + :param \\**kwargs: + Optional kwargs. + """ + return {self.field: value} + + def render(self, value, obj=None, **kwargs): + """ + :return: A string representation of the related value. + If ``use_natural_foreign_keys``, the value's natural key is returned. + ``coerce_to_string`` has no effect on the return value. + """ + self._obj_deprecation_warning(obj) + + if self.key_is_id: + return value or "" + if value is None: return "" - attrs = self.field.split('__') + attrs = self.field.split("__") for attr in attrs: try: - value = getattr(value, attr, None) + if self.use_natural_foreign_keys: + # inbound natural keys must be a json list. + return json.dumps(value.natural_key()) + else: + value = getattr(value, attr, None) except (ValueError, ObjectDoesNotExist): # needs to have a primary key value before a many-to-many # relationship can be used. @@ -435,17 +605,17 @@ class ManyToManyWidget(Widget): :param field: A field on the related model. Default is ``pk``. 
""" - def __init__(self, model, separator=None, field=None, *args, **kwargs): + def __init__(self, model, separator=None, field=None, **kwargs): if separator is None: - separator = ',' + separator = "," if field is None: - field = 'pk' + field = "pk" self.model = model self.separator = separator self.field = field - super().__init__(*args, **kwargs) + super().__init__(**kwargs) - def clean(self, value, row=None, *args, **kwargs): + def clean(self, value, row=None, **kwargs): if not value: return self.model.objects.none() if isinstance(value, (float, int)): @@ -453,10 +623,16 @@ def clean(self, value, row=None, *args, **kwargs): else: ids = value.split(self.separator) ids = filter(None, [i.strip() for i in ids]) - return self.model.objects.filter(**{ - '%s__in' % self.field: ids - }) + return self.model.objects.filter(**{"%s__in" % self.field: ids}) - def render(self, value, obj=None): - ids = [smart_str(getattr(obj, self.field)) for obj in value.all()] - return self.separator.join(ids) + def render(self, value, obj=None, **kwargs): + """ + :return: A string with values separated by ``separator``. + ``None`` values are returned as empty strings. + ``coerce_to_string`` has no effect on the return value. + """ + self._obj_deprecation_warning(obj) + if value is not None: + ids = [smart_str(getattr(obj, self.field)) for obj in value.all()] + return self.separator.join(ids) + return "" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..feaa1e996 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,86 @@ +[build-system] +requires = ["setuptools>=60", "setuptools-scm>=7.1.0"] +build-backend = "setuptools.build_meta" + +[project] +name = "django-import-export" +authors = [ + {name = "Bojan Mihelač", email = "djangoimportexport@gmail.com"}, +] +maintainers = [ + {name = "Matthew Hegarty", email = "djangoimportexport@gmail.com"}, +] +description = "Django application and library for importing and exporting data with included admin integration." 
+keywords = ["django", "import", "export"] +license = {file = "LICENSE"} +requires-python = ">=3.9" +readme = "README.rst" +dynamic = ["version"] +classifiers = [ + "Framework :: Django", + "Framework :: Django :: 4.2", + "Framework :: Django :: 5.0", + "Framework :: Django :: 5.1", + "Framework :: Django :: 5.2", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3 :: Only", + "Topic :: Software Development", +] + +dependencies = [ + "diff-match-patch==20241021", + "Django>=4.2", + "tablib>=3.7.0" +] + +[project.optional-dependencies] +all = [ + "tablib[all]" +] +cli = ["tablib[cli]"] +ods = ["tablib[ods]"] +pandas = ["tablib[pandas]"] +xls = ["tablib[xls]"] +xlsx = ["tablib[xlsx]"] +yaml = ["tablib[yaml]"] +docs = [ + "sphinx==8.1.3", + "sphinx-rtd-theme==3.0.1", + "openpyxl==3.1.5" +] +tests = [ + "psycopg2-binary==2.9.10", + "mysqlclient==2.2.5", + "chardet==5.2.0", + "pytz==2024.2", + "memory-profiler==0.61.0", + "django-extensions==3.2.3", + "coverage==7.6.4", + "tablib[all]>=3.7.0", + "setuptools-scm==8.1.0", +] + +[project.urls] +Documentation = "https://django-import-export.readthedocs.io/en/stable/" +Repository = "https://github.com/django-import-export/django-import-export" +Changelog = "https://github.com/django-import-export/django-import-export/blob/main/docs/changelog.rst" + +[tool.setuptools] +platforms = ["OS Independent"] +license-files = ["LICENSE", "AUTHORS"] + +[tool.setuptools_scm] +write_to = "import_export/_version.py" +local_scheme = "no-local-version" + +[tool.isort] +profile = "black" diff --git a/requirements/base.txt b/requirements/base.txt 
deleted file mode 100644 index 9a47dd194..000000000 --- a/requirements/base.txt +++ /dev/null @@ -1,3 +0,0 @@ -Django>=2.2 -tablib[html,ods,xls,xlsx,yaml]>=3.0.0 -diff-match-patch diff --git a/requirements/deploy.txt b/requirements/deploy.txt deleted file mode 100644 index 4ffaaa4bf..000000000 --- a/requirements/deploy.txt +++ /dev/null @@ -1,2 +0,0 @@ -wheel -zest.releaser \ No newline at end of file diff --git a/requirements/docs.txt b/requirements/docs.txt deleted file mode 100644 index cbf1e3658..000000000 --- a/requirements/docs.txt +++ /dev/null @@ -1,2 +0,0 @@ -sphinx -sphinx-rtd-theme diff --git a/requirements/test.txt b/requirements/test.txt deleted file mode 100644 index ef70b981b..000000000 --- a/requirements/test.txt +++ /dev/null @@ -1,8 +0,0 @@ -isort -psycopg2-binary -mysqlclient -coveralls -chardet -pytz -memory-profiler -django-extensions \ No newline at end of file diff --git a/runtests.sh b/runtests.sh index 3c627ca24..ab17259dd 100755 --- a/runtests.sh +++ b/runtests.sh @@ -1 +1,31 @@ -PYTHONPATH=".:tests:$PYTHONPATH" django-admin test core --settings=settings +#!/usr/bin/env sh + +# run tests against all supported databases using tox +# postgres / mysql run via docker +# sqlite (default) runs against local database file (database.db) +# use pyenv or similar to install multiple python instances + +export DJANGO_SETTINGS_MODULE=settings + +export IMPORT_EXPORT_POSTGRESQL_USER=pguser +export IMPORT_EXPORT_POSTGRESQL_PASSWORD=pguserpass + +export IMPORT_EXPORT_MYSQL_USER=mysqluser +export IMPORT_EXPORT_MYSQL_PASSWORD=mysqluserpass + +echo "starting local database instances" +docker compose -f tests/docker-compose.yml up -d + +echo "running tests (sqlite)" +tox + +echo "running tests (mysql)" +export IMPORT_EXPORT_TEST_TYPE=mysql-innodb +tox + +echo "running tests (postgres)" +export IMPORT_EXPORT_TEST_TYPE=postgres +tox + +echo "removing local database instances" +docker compose -f tests/docker-compose.yml down -v diff --git a/setup.cfg b/setup.cfg 
deleted file mode 100644 index 6b0cf2400..000000000 --- a/setup.cfg +++ /dev/null @@ -1,9 +0,0 @@ -[metadata] -license_file = LICENSE - -[zest.releaser] -create-wheel = yes -python-file-with-version = import_export/__init__.py - -[isort] -profile = black diff --git a/setup.py b/setup.py deleted file mode 100644 index 431533160..000000000 --- a/setup.py +++ /dev/null @@ -1,59 +0,0 @@ -import os - -from setuptools import find_packages, setup - -VERSION = __import__("import_export").__version__ - -CLASSIFIERS = [ - 'Framework :: Django', - 'Framework :: Django :: 2.2', - 'Framework :: Django :: 3.1', - 'Framework :: Django :: 3.2', - 'Framework :: Django :: 4.0', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3 :: Only', - 'Topic :: Software Development', -] - -install_requires = [ - 'diff-match-patch', - 'Django>=2.2', - 'tablib[html,ods,xls,xlsx,yaml]>=3.0.0', -] - - -with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f: - readme = f.read() - - -setup( - name="django-import-export", - description="Django application and library for importing and exporting" - " data with included admin integration.", - long_description=readme, - version=VERSION, - author="Informatika Mihelac", - author_email="bmihelac@mihelac.org", - license='BSD License', - platforms=['OS Independent'], - url="https://github.com/django-import-export/django-import-export", - project_urls={ - "Documentation": "https://django-import-export.readthedocs.io/en/stable/", - "Changelog": "https://django-import-export.readthedocs.io/en/stable/changelog.html", - }, - 
packages=find_packages(exclude=["tests"]), - include_package_data=True, - install_requires=install_requires, - python_requires=">=3.6", - classifiers=CLASSIFIERS, - zip_safe=False, -) diff --git a/tests/bulk/README.md b/tests/bulk/README.md deleted file mode 100644 index e5bc1505c..000000000 --- a/tests/bulk/README.md +++ /dev/null @@ -1,192 +0,0 @@ -## Bulk import testing - -This scripts outlines the steps used to profile bulk loading. -The `bulk_import.py` script is used to profile run duration and memory during bulk load testing. - -### Pre-requisites - -- [Docker](https://docker.com) -- [virtualenvwrapper](https://virtualenvwrapper.readthedocs.io/en/latest/command_ref.html) - -### Test environment - -The following tests were run on the following platform: - -- Thinkpad T470 i5 processor (Ubuntu 18.04) -- python 3.8.1 -- Postgres 10 (docker container) - -### Install dependencies - -```bash -# create venv and install django-import-export dependencies -cd -mkvirtualenv -p python3 djangoimportexport -pip install -r requirements/base.txt -r requirements/test.txt -``` - -### Create Postgres DB - -```bash -export IMPORT_EXPORT_TEST_TYPE=postgres -export IMPORT_EXPORT_POSTGRESQL_USER=pguser -export IMPORT_EXPORT_POSTGRESQL_PASSWORD=pguserpass -export DJANGO_SETTINGS_MODULE=settings - -cd /tests - -# start a local postgres instance -docker-compose -f bulk/docker-compose.yml up -d - -./manage.py migrate -./manage.py test - -# only required if you want to login to the Admin site -./manage.py createsuperuser --username admin --email=email@example.com -``` - -### Update settings - -In order to use the `runscript` command, add `django_extensions` to `settings.py` (`INSTALLED_APPS`). 
- -### Running script - -```bash -# run creates, updates, and deletes -./manage.py runscript bulk_import - -# pass 'create', 'update' or 'delete' to run the single test -./manage.py runscript bulk_import --script-args create -``` - -### Results - -- Used 20k book entries -- Memory is reported as the peak memory value whilst running the test script - -#### bulk_create - -##### Default settings - -- default settings -- uses `ModelInstanceLoader` by default - -| Condition | Time (secs) | Memory (MB) | -| ---------------------------------- | ----------- | ------------- | -| `use_bulk=False` | 42.67 | 16.22 | -| `use_bulk=True, batch_size=None` | 33.72 | 50.02 | -| `use_bulk=True, batch_size=1000` | 33.21 | 11.43 | - -##### Performance tweaks - -| use_bulk | batch_size | skip_diff | instance_loader | time (secs) | peak mem (MB) | -| -------- | ---------- | --------- | -------------------- | ----------- | ------------- | -| True | 1000 | True | force_init_instance | 9.60 | 9.4 | -| True | 1000 | False | CachedInstanceLoader | 13.72 | 9.9 | -| True | 1000 | True | CachedInstanceLoader | 16.12 | 9.5 | -| True | 1000 | False | force_init_instance | 19.93 | 10.5 | -| False | n/a | False | force_init_instance | 26.59 | 14.1 | -| True | 1000 | False | ModelInstanceLoader | 28.60 | 9.7 | -| True | 1000 | False | ModelInstanceLoader | 33.19 | 10.6 | -| False | n/a | False | ModelInstanceLoader | 45.32 | 16.3 | - -(`force_init_instance`) means overriding `get_or_init_instance()` - this can be done when you know for certain that you are importing new rows: - -```python -def get_or_init_instance(self, instance_loader, row): - return self._meta.model(), True -``` - -#### bulk_update - -```bash -./manage.py runscript bulk_import --script-args update -``` - -##### Default settings - -- `skip_diff = False` -- `instance_loader_class = ModelInstanceLoader` - -| Condition | Time (secs) | Memory (MB) | -| ---------------------------------- | ----------- | ------------- | -| 
`use_bulk=False` | 82.28 | 9.33 | -| `use_bulk=True, batch_size=None` | 92.41 | 202.26 | -| `use_bulk=True, batch_size=1000` | 52.63 | 11.25 | - -##### Performance tweaks - -- `skip_diff = True` -- `instance_loader_class = CachedInstanceLoader` - -| Condition | Time (secs) | Memory (MB) | -| ---------------------------------- | ----------- | ------------- | -| `use_bulk=False` | 28.85 | 20.71 | -| `use_bulk=True, batch_size=None` | 65.11 | 201.01 | -| `use_bulk=True, batch_size=1000` | 21.56 | 21.25 | - - -- `skip_diff = False` - -| Condition | Time (secs) | Memory (MB) | -| ---------------------------------------- | ----------- | ------------- | -| `use_bulk=True, batch_size=1000` | 9.26 | 8.51 | -| `skip_html_diff=True, batch_size=1000` | 8.69 | 7.50 | -| `skip_unchanged=True, batch_size=1000` | 5.42 | 7.34 | - - -#### bulk delete - -```bash -./manage.py runscript bulk_import --script-args delete -``` - -##### Default settings - -- `skip_diff = False` -- `instance_loader_class = ModelInstanceLoader` - -| Condition | Time (secs) | Memory (MB) | -| ---------------------------------- | ----------- | ------------- | -| `use_bulk=False` | 95.56 | 31.36 | -| `use_bulk=True, batch_size=None` | 50.20 | 64.66 | -| `use_bulk=True, batch_size=1000` | 43.77 | 33.123 | - -##### Performance tweaks - -- `skip_diff = True` -- `instance_loader_class = CachedInstanceLoader` - -| Condition | Time (secs) | Memory (MB) | -| ---------------------------------- | ----------- | ------------- | -| `use_bulk=False` | 61.66 | 31.94 | -| `use_bulk=True, batch_size=None` | 14.08 | 39.40 | -| `use_bulk=True, batch_size=1000` | 15.37 | 32.70 | - -### Checking DB - -Note that the db is cleared down after each test run. -You need to uncomment the `delete()` calls to be able to view data. 
- -```bash -./manage.py shell_plus - -Book.objects.all().count() -``` - -### Clear down - -Optional clear down of resources: - -```bash -# remove the test db container -docker-compose -f bulk/docker-compose.yml down -v - -# remove venv -deactivate -rmvirtualenv djangoimportexport -``` - -### References - -- https://hakibenita.com/fast-load-data-python-postgresql diff --git a/tests/bulk/docker-compose.yml b/tests/bulk/docker-compose.yml deleted file mode 100644 index b7bfa8e33..000000000 --- a/tests/bulk/docker-compose.yml +++ /dev/null @@ -1,23 +0,0 @@ -version: '3.3' -services: - db: - container_name: importexport_pgdb - environment: - IMPORT_EXPORT_TEST_TYPE: 'postgres' - DB_HOST: 'db' - DB_PORT: '5432' - DB_NAME: 'import_export' - IMPORT_EXPORT_POSTGRESQL_USER: ${IMPORT_EXPORT_POSTGRESQL_USER} - IMPORT_EXPORT_POSTGRESQL_PASSWORD: ${IMPORT_EXPORT_POSTGRESQL_PASSWORD} - POSTGRES_PASSWORD: ${IMPORT_EXPORT_POSTGRESQL_PASSWORD} - image: postgres:10 - restart: "no" - ports: - - 5432:5432 - volumes: - - ./docker/db/:/docker-entrypoint-initdb.d/ - - local-db-data:/var/lib/postgresql/data - -volumes: - local-db-data: - driver: local diff --git a/tests/core/admin.py b/tests/core/admin.py index 90f007f20..0106cfc46 100644 --- a/tests/core/admin.py +++ b/tests/core/admin.py @@ -1,12 +1,18 @@ from django.contrib import admin -from import_export.admin import ExportActionModelAdmin, ImportExportMixin, ImportMixin +from import_export.admin import ( + ExportActionModelAdmin, + ImportExportModelAdmin, + ImportMixin, +) +from import_export.fields import Field from import_export.resources import ModelResource -from .forms import CustomConfirmImportForm, CustomImportForm -from .models import Author, Book, Category, Child, EBook +from .forms import CustomConfirmImportForm, CustomExportForm, CustomImportForm +from .models import Author, Book, Category, Child, EBook, UUIDBook, UUIDCategory +@admin.register(Child) class ChildAdmin(ImportMixin, admin.ModelAdmin): pass @@ -17,43 +23,100 
@@ class Meta: model = Book def for_delete(self, row, instance): - return self.fields['name'].clean(row) == '' + return self.fields["name"].clean(row) == "" -class BookAdmin(ImportExportMixin, admin.ModelAdmin): - list_display = ('name', 'author', 'added') - list_filter = ['categories', 'author'] - resource_class = BookResource +class BookNameResource(ModelResource): + class Meta: + model = Book + fields = ["id", "name"] + name = "Export/Import only book names" + + +@admin.register(Book) +class BookAdmin(ImportExportModelAdmin): + list_display = ("name", "author", "added") + list_filter = ["categories", "author"] + resource_classes = [BookResource, BookNameResource] + change_list_template = "core/admin/change_list.html" +@admin.register(Category) class CategoryAdmin(ExportActionModelAdmin): + def get_queryset(self, request): + return Category.objects.all() + + +@admin.register(UUIDBook) +class UUIDBookAdmin(ImportExportModelAdmin): pass +@admin.register(UUIDCategory) +class UUIDCategoryAdmin(ExportActionModelAdmin): + pass + + +@admin.register(Author) class AuthorAdmin(ImportMixin, admin.ModelAdmin): pass -class CustomBookAdmin(BookAdmin): - """BookAdmin with custom import forms""" +class UUIDBookResource(ModelResource): + class Meta: + model = UUIDBook - def get_import_form(self): - return CustomImportForm - def get_confirm_import_form(self): - return CustomConfirmImportForm +class EBookResource(ModelResource): + published = Field(attribute="published", column_name="published_date") + author_email = Field(attribute="author_email", column_name="Email of the author") + auteur_name = Field(attribute="author__name", column_name="Author Name") - def get_form_kwargs(self, form, *args, **kwargs): - # update kwargs with authors (from CustomImportForm.cleaned_data) - if isinstance(form, CustomImportForm): - if form.is_valid(): - author = form.cleaned_data['author'] - kwargs.update({'author': author.id}) - return kwargs + def __init__(self, **kwargs): + super().__init__() + 
self.author_id = kwargs.get("author_id") + def filter_export(self, queryset, **kwargs): + return queryset.filter(author_id=self.author_id) -admin.site.register(Book, BookAdmin) -admin.site.register(Category, CategoryAdmin) -admin.site.register(Author, AuthorAdmin) -admin.site.register(Child, ChildAdmin) -admin.site.register(EBook, CustomBookAdmin) + class Meta: + model = EBook + fields = ("id", "author_email", "name", "published", "auteur_name") + + +@admin.register(EBook) +class CustomBookAdmin(ExportActionModelAdmin, ImportExportModelAdmin): + """Example usage of custom import / export forms""" + + resource_classes = [EBookResource] + import_form_class = CustomImportForm + confirm_form_class = CustomConfirmImportForm + export_form_class = CustomExportForm + + def get_confirm_form_initial(self, request, import_form): + initial = super().get_confirm_form_initial(request, import_form) + # Pass on the `author` value from the import form to + # the confirm form (if provided) + if import_form: + initial["author"] = import_form.cleaned_data["author"].id + return initial + + def get_import_resource_kwargs(self, request, **kwargs): + # update resource kwargs so that the Resource is passed the authenticated user + # This is included as an example of how dynamic values + # can be passed to resources + if "form" not in kwargs: + # test for #1789 + raise ValueError("'form' param was expected in kwargs") + kwargs = super().get_resource_kwargs(request, **kwargs) + kwargs.update({"user": request.user}) + return kwargs + + def get_export_resource_kwargs(self, request, **kwargs): + # this is overridden to demonstrate that custom form fields can be used + # to override the export query. 
+ # The dict returned here will be passed as kwargs to EBookResource + export_form = kwargs.get("export_form") + if export_form: + kwargs.update(author_id=export_form.cleaned_data["author"].id) + return kwargs diff --git a/tests/core/exports/authors.csv b/tests/core/exports/authors.csv new file mode 100644 index 000000000..c786a454b --- /dev/null +++ b/tests/core/exports/authors.csv @@ -0,0 +1,2 @@ +id,name +1,J. R. R. Tolkien diff --git a/tests/core/exports/books-ISO-8859-1.csv b/tests/core/exports/books-ISO-8859-1.csv new file mode 100644 index 000000000..b27422355 --- /dev/null +++ b/tests/core/exports/books-ISO-8859-1.csv @@ -0,0 +1,2 @@ +id,name,author_email +1,Merci toi,test@example.com diff --git a/tests/core/exports/books-empty-author-email.xlsx b/tests/core/exports/books-empty-author-email.xlsx new file mode 100644 index 000000000..5b776db53 Binary files /dev/null and b/tests/core/exports/books-empty-author-email.xlsx differ diff --git a/tests/core/exports/books-invalid-date.csv b/tests/core/exports/books-invalid-date.csv new file mode 100644 index 000000000..ad5b2e55e --- /dev/null +++ b/tests/core/exports/books-invalid-date.csv @@ -0,0 +1,3 @@ +id,name,published +1,book,1996-01-01 +2,Some book,1996x-01-01 diff --git a/tests/core/exports/books-no-headers.csv b/tests/core/exports/books-no-headers.csv new file mode 100644 index 000000000..8baef1b4a --- /dev/null +++ b/tests/core/exports/books-no-headers.csv @@ -0,0 +1 @@ +abc diff --git a/tests/core/exports/books.json b/tests/core/exports/books.json new file mode 100644 index 000000000..62faac505 --- /dev/null +++ b/tests/core/exports/books.json @@ -0,0 +1,38 @@ +[ + { + "id": 11, + "name": "A Game of Thrones", + "author": 11, + "author_email": "martin@got.com", + "imported": "0", + "published": "1996-08-01", + "published_time": "21:00:00", + "price": "25.00", + "added": "", + "categories": "1" + }, + { + "id": 6, + "name": "Circles", + "author": 11, + "author_email": "geo@met.ry", + "imported": "0", + 
"published": "2020-08-01", + "published_time": "21:00:00", + "price": "15.00", + "added": "", + "categories": "2" + }, + { + "id": 5, + "name": "Squares", + "author": 5, + "author_email": "geo@met.ry", + "imported": "0", + "published": "1999-08-01", + "published_time": "21:00:00", + "price": "5.00", + "added": "", + "categories": "2" + } +] \ No newline at end of file diff --git a/tests/core/exports/ebooks.csv b/tests/core/exports/ebooks.csv new file mode 100644 index 000000000..2420f4b46 --- /dev/null +++ b/tests/core/exports/ebooks.csv @@ -0,0 +1,2 @@ +id,name,Email of the author +1,Some book,test@example.com diff --git a/tests/core/fixtures/author.json b/tests/core/fixtures/author.json new file mode 100644 index 000000000..90c11148c --- /dev/null +++ b/tests/core/fixtures/author.json @@ -0,0 +1,18 @@ +[ + { + "model": "core.author", + "pk": 11, + "fields": { + "name": "George R. R. Martin", + "birthday": "1948-09-20" + } + }, + { + "model": "core.author", + "pk": 5, + "fields": { + "name": "Ian Fleming", + "birthday": "1908-05-28" + } + } +] diff --git a/tests/core/fixtures/book.json b/tests/core/fixtures/book.json index 84d2d7440..b193f6f65 100644 --- a/tests/core/fixtures/book.json +++ b/tests/core/fixtures/book.json @@ -1,20 +1,4 @@ [ - { - "model": "core.author", - "pk": 11, - "fields": { - "name": "George R. R. 
Martin", - "birthday": "1948-09-20" - } - }, - { - "model": "core.author", - "pk": 5, - "fields": { - "name": "Geo Metry", - "birthday": "1950-12-20" - } - }, { "model": "core.book", "pk": 11, @@ -33,11 +17,11 @@ "model": "core.book", "pk": 5, "fields": { - "name": "Squares", + "name": "The Man with the Golden Gun", "author": 5, - "author_email": "geo@met.ry", + "author_email": "ian@example.com", "imported": false, - "published": "1999-08-01", + "published": "1965-04-01", "published_time": "21:00", "price": 5.0, "categories": [2] @@ -57,4 +41,4 @@ "categories": [2] } } -] \ No newline at end of file +] diff --git a/tests/core/forms.py b/tests/core/forms.py index 253d8b9c5..7d41398b4 100644 --- a/tests/core/forms.py +++ b/tests/core/forms.py @@ -1,20 +1,31 @@ from django import forms -from import_export.forms import ConfirmImportForm, ImportForm +from import_export.forms import ( + ConfirmImportForm, + ImportForm, + SelectableFieldsExportForm, +) from .models import Author class AuthorFormMixin(forms.Form): - author = forms.ModelChoiceField(queryset=Author.objects.all(), - required=True) + author = forms.ModelChoiceField(queryset=Author.objects.all(), required=True) class CustomImportForm(AuthorFormMixin, ImportForm): """Customized ImportForm, with author field required""" + pass class CustomConfirmImportForm(AuthorFormMixin, ConfirmImportForm): """Customized ConfirmImportForm, with author field required""" + pass + + +class CustomExportForm(AuthorFormMixin, SelectableFieldsExportForm): + """Customized ExportForm, with author field required.""" + + author = forms.ModelChoiceField(queryset=Author.objects.all(), required=True) diff --git a/tests/core/migrations/0001_initial.py b/tests/core/migrations/0001_initial.py index 9ab5435a0..60e481963 100644 --- a/tests/core/migrations/0001_initial.py +++ b/tests/core/migrations/0001_initial.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ @@ -14,64 +13,168 @@ class 
Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Author', + name="Author", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), - ('birthday', models.DateTimeField(auto_now_add=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=100)), + ("birthday", models.DateTimeField(auto_now_add=True)), ], ), migrations.CreateModel( - name='Book', + name="Book", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100, verbose_name='Book name')), - ('author_email', models.EmailField(blank=True, max_length=75, verbose_name='Author email')), - ('imported', models.BooleanField(default=False)), - ('published', models.DateField(blank=True, null=True, verbose_name='Published')), - ('price', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)), - ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Author')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=100, verbose_name="Book name")), + ( + "author_email", + models.EmailField( + blank=True, max_length=75, verbose_name="Author email" + ), + ), + ("imported", models.BooleanField(default=False)), + ( + "published", + models.DateField(blank=True, null=True, verbose_name="Published"), + ), + ( + "price", + models.DecimalField( + blank=True, decimal_places=2, max_digits=10, null=True + ), + ), + ( + "author", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="core.Author", + ), + ), ], ), migrations.CreateModel( - name='Category', + name="Category", fields=[ 
- ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=100)), ], ), migrations.CreateModel( - name='Entry', + name="Entry", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='Profile', + name="Profile", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('is_private', models.BooleanField(default=True)), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("is_private", models.BooleanField(default=True)), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='WithDefault', + name="WithDefault", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(blank=True, default='foo_bar', max_length=75, verbose_name='Default')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + models.CharField( + blank=True, + default="foo_bar", + max_length=75, + verbose_name="Default", + 
), + ), ], ), migrations.CreateModel( - name='WithDynamicDefault', + name="WithDynamicDefault", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(default=core.models.random_name, max_length=100, verbose_name='Dyn Default')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + models.CharField( + default=core.models.random_name, + max_length=100, + verbose_name="Dyn Default", + ), + ), ], ), migrations.AddField( - model_name='book', - name='categories', - field=models.ManyToManyField(blank=True, to='core.Category'), + model_name="book", + name="categories", + field=models.ManyToManyField(blank=True, to="core.Category"), ), ] diff --git a/tests/core/migrations/0002_book_published_time.py b/tests/core/migrations/0002_book_published_time.py index a3cabfc31..e74935626 100644 --- a/tests/core/migrations/0002_book_published_time.py +++ b/tests/core/migrations/0002_book_published_time.py @@ -2,15 +2,16 @@ class Migration(migrations.Migration): - dependencies = [ - ('core', '0001_initial'), + ("core", "0001_initial"), ] operations = [ migrations.AddField( - model_name='book', - name='published_time', - field=models.TimeField(blank=True, null=True, verbose_name='Time published'), + model_name="book", + name="published_time", + field=models.TimeField( + blank=True, null=True, verbose_name="Time published" + ), ), ] diff --git a/tests/core/migrations/0003_withfloatfield.py b/tests/core/migrations/0003_withfloatfield.py index d8ab96624..8602c44a2 100644 --- a/tests/core/migrations/0003_withfloatfield.py +++ b/tests/core/migrations/0003_withfloatfield.py @@ -2,17 +2,24 @@ class Migration(migrations.Migration): - dependencies = [ - ('core', '0002_book_published_time'), + ("core", "0002_book_published_time"), ] operations = [ migrations.CreateModel( - name='WithFloatField', + name="WithFloatField", fields=[ - 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('f', models.FloatField(blank=True, null=True)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("f", models.FloatField(blank=True, null=True)), ], ), ] diff --git a/tests/core/migrations/0004_bookwithchapters.py b/tests/core/migrations/0004_bookwithchapters.py index b8faa0009..e170d4a6f 100644 --- a/tests/core/migrations/0004_bookwithchapters.py +++ b/tests/core/migrations/0004_bookwithchapters.py @@ -9,7 +9,9 @@ try: from django.contrib.postgres.fields import ArrayField, JSONField - chapters_field = ArrayField(base_field=models.CharField(max_length=100), default=list, size=None) + chapters_field = ArrayField( + base_field=models.CharField(max_length=100), default=list, size=None + ) data_field = JSONField(null=True) can_use_postgres_fields = True except ImportError: @@ -18,26 +20,35 @@ class Migration(migrations.Migration): - dependencies = [ - ('core', '0003_withfloatfield'), + ("core", "0003_withfloatfield"), ] operations = [] pg_only_operations = [ migrations.CreateModel( - name='BookWithChapters', + name="BookWithChapters", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100, verbose_name='Book name')), - ('chapters', chapters_field), - ('data', data_field) + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=100, verbose_name="Book name")), + ("chapters", chapters_field), + ("data", data_field), ], ), ] def apply(self, project_state, schema_editor, collect_sql=False): - if can_use_postgres_fields and schema_editor.connection.vendor.startswith("postgres"): + if can_use_postgres_fields and schema_editor.connection.vendor.startswith( + "postgres" + ): self.operations = 
self.operations + self.pg_only_operations return super().apply(project_state, schema_editor, collect_sql) diff --git a/tests/core/migrations/0005_addparentchild.py b/tests/core/migrations/0005_addparentchild.py index 572b37dde..3ad6ec9c8 100644 --- a/tests/core/migrations/0005_addparentchild.py +++ b/tests/core/migrations/0005_addparentchild.py @@ -3,29 +3,46 @@ class Migration(migrations.Migration): - dependencies = [ - ('core', '0004_bookwithchapters'), + ("core", "0004_bookwithchapters"), ] operations = [ migrations.CreateModel( - name='Child', + name="Child", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=100)), ], ), migrations.CreateModel( - name='Parent', + name="Parent", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=100)), ], ), migrations.AddField( - model_name='child', - name='parent', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Parent'), + model_name="child", + name="parent", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="core.Parent" + ), ), ] diff --git a/tests/core/migrations/0006_auto_20171130_0147.py b/tests/core/migrations/0006_auto_20171130_0147.py index dc9c57681..6c5b6e14f 100644 --- a/tests/core/migrations/0006_auto_20171130_0147.py +++ b/tests/core/migrations/0006_auto_20171130_0147.py @@ -2,15 +2,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('core', '0005_addparentchild'), + ("core", "0005_addparentchild"), ] operations = [ 
migrations.AlterField( - model_name='category', - name='name', + model_name="category", + name="name", field=models.CharField(max_length=100, unique=True), ), ] diff --git a/tests/core/migrations/0007_auto_20180628_0411.py b/tests/core/migrations/0007_auto_20180628_0411.py index 6ca0f7867..21181f5b8 100644 --- a/tests/core/migrations/0007_auto_20180628_0411.py +++ b/tests/core/migrations/0007_auto_20180628_0411.py @@ -4,29 +4,53 @@ class Migration(migrations.Migration): - dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('core', '0006_auto_20171130_0147'), + ("core", "0006_auto_20171130_0147"), ] operations = [ migrations.CreateModel( - name='Person', + name="Person", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ], ), migrations.CreateModel( - name='Role', + name="Role", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('user', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "user", + models.OneToOneField( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.AddField( - model_name='person', - name='role', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Role'), + model_name="person", + name="role", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="core.Role" + ), ), ] diff --git a/tests/core/migrations/0008_auto_20190409_0846.py b/tests/core/migrations/0008_auto_20190409_0846.py index 257bd6c87..79757a98f 100644 --- a/tests/core/migrations/0008_auto_20190409_0846.py +++ 
b/tests/core/migrations/0008_auto_20190409_0846.py @@ -4,25 +4,23 @@ class Migration(migrations.Migration): - dependencies = [ - ('core', '0007_auto_20180628_0411'), + ("core", "0007_auto_20180628_0411"), ] operations = [ migrations.CreateModel( - name='EBook', - fields=[ - ], + name="EBook", + fields=[], options={ - 'proxy': True, - 'indexes': [], + "proxy": True, + "indexes": [], }, - bases=('core.book',), + bases=("core.book",), ), migrations.AddField( - model_name='book', - name='added', + model_name="book", + name="added", field=models.DateTimeField(blank=True, null=True), ), ] diff --git a/tests/core/migrations/0009_auto_20211111_0807.py b/tests/core/migrations/0009_auto_20211111_0807.py index 1ca54001e..5d97bbb72 100644 --- a/tests/core/migrations/0009_auto_20211111_0807.py +++ b/tests/core/migrations/0009_auto_20211111_0807.py @@ -4,70 +4,93 @@ class Migration(migrations.Migration): - dependencies = [ - ('core', '0008_auto_20190409_0846'), + ("core", "0008_auto_20190409_0846"), ] operations = [ migrations.AlterField( - model_name='author', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="author", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='book', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="book", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='category', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="category", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='child', - name='id', 
- field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="child", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='entry', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="entry", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='parent', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="parent", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='person', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="person", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='profile', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="profile", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='role', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="role", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='withdefault', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="withdefault", + name="id", + 
field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='withdynamicdefault', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="withdynamicdefault", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), migrations.AlterField( - model_name='withfloatfield', - name='id', - field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + model_name="withfloatfield", + name="id", + field=models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), ), ] diff --git a/tests/core/migrations/0010_uuidbook.py b/tests/core/migrations/0010_uuidbook.py new file mode 100644 index 000000000..626ee4048 --- /dev/null +++ b/tests/core/migrations/0010_uuidbook.py @@ -0,0 +1,25 @@ +# Generated by Django 2.2.7 on 2021-05-02 07:46 +import uuid + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0009_auto_20211111_0807"), + ] + + operations = [ + migrations.CreateModel( + name="UUIDBook", + fields=[ + ( + "id", + models.UUIDField( + primary_key=True, default=uuid.uuid4, editable=False + ), + ), + ("name", models.CharField(max_length=100, verbose_name="Book name")), + ], + ), + ] diff --git a/tests/core/migrations/0011_uuidcategory_legacybook_alter_uuidbook_id_and_more.py b/tests/core/migrations/0011_uuidcategory_legacybook_alter_uuidbook_id_and_more.py new file mode 100644 index 000000000..af69b630d --- /dev/null +++ b/tests/core/migrations/0011_uuidcategory_legacybook_alter_uuidbook_id_and_more.py @@ -0,0 +1,51 @@ +# Generated by Django 4.0.4 on 2022-05-12 12:39 + +import uuid + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0010_uuidbook"), + 
] + + operations = [ + migrations.CreateModel( + name="UUIDCategory", + fields=[ + ( + "catid", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + ), + ), + ("name", models.CharField(max_length=32)), + ], + ), + migrations.CreateModel( + name="LegacyBook", + fields=[], + options={ + "proxy": True, + "indexes": [], + "constraints": [], + }, + bases=("core.book",), + ), + migrations.AlterField( + model_name="uuidbook", + name="id", + field=models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), + ), + migrations.AddField( + model_name="uuidbook", + name="categories", + field=models.ManyToManyField(blank=True, to="core.uuidcategory"), + ), + ] diff --git a/tests/core/migrations/0012_delete_legacybook.py b/tests/core/migrations/0012_delete_legacybook.py new file mode 100644 index 000000000..8c56d2666 --- /dev/null +++ b/tests/core/migrations/0012_delete_legacybook.py @@ -0,0 +1,15 @@ +# Generated by Django 4.2.3 on 2023-09-15 08:20 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0011_uuidcategory_legacybook_alter_uuidbook_id_and_more"), + ] + + operations = [ + migrations.DeleteModel( + name="LegacyBook", + ), + ] diff --git a/tests/core/migrations/0013_alter_author_birthday.py b/tests/core/migrations/0013_alter_author_birthday.py new file mode 100644 index 000000000..5a2e93240 --- /dev/null +++ b/tests/core/migrations/0013_alter_author_birthday.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.3 on 2023-10-11 03:54 + +import django.utils.timezone +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0012_delete_legacybook"), + ] + + operations = [ + migrations.AlterField( + model_name="author", + name="birthday", + field=models.DateTimeField(default=django.utils.timezone.now), + ), + ] diff --git a/tests/core/migrations/0014_bookwithchapternumbers.py 
b/tests/core/migrations/0014_bookwithchapternumbers.py new file mode 100644 index 000000000..d3e622f5d --- /dev/null +++ b/tests/core/migrations/0014_bookwithchapternumbers.py @@ -0,0 +1,51 @@ +from django.db import migrations, models + +can_use_postgres_fields = False + +# Dummy fields +chapter_numbers_field = models.Field() + +try: + from django.contrib.postgres.fields import ArrayField + + chapter_numbers_field = ArrayField( + base_field=models.PositiveSmallIntegerField(), default=list, size=None + ) + can_use_postgres_fields = True +except ImportError: + # We can't use ArrayField if psycopg2 is not installed - issue #1125 + pass + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0013_alter_author_birthday"), + ] + + operations = [] + + pg_only_operations = [ + migrations.CreateModel( + name="BookWithChapterNumbers", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=100, verbose_name="Book name")), + ("chapter_numbers", chapter_numbers_field), + ], + ), + ] + + def apply(self, project_state, schema_editor, collect_sql=False): + if can_use_postgres_fields and schema_editor.connection.vendor.startswith( + "postgres" + ): + self.operations = self.operations + self.pg_only_operations + return super().apply(project_state, schema_editor, collect_sql) diff --git a/tests/core/migrations/0015_withpositiveintegerfields.py b/tests/core/migrations/0015_withpositiveintegerfields.py new file mode 100644 index 000000000..9763112f5 --- /dev/null +++ b/tests/core/migrations/0015_withpositiveintegerfields.py @@ -0,0 +1,28 @@ +# Generated by Django 5.0.4 on 2024-04-23 22:46 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0014_bookwithchapternumbers"), + ] + + operations = [ + migrations.CreateModel( + name="WithPositiveIntegerFields", + fields=[ + ( + "id", + 
models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("big", models.PositiveBigIntegerField(null=True)), + ("small", models.PositiveSmallIntegerField(null=True)), + ], + ), + ] diff --git a/tests/core/migrations/0016_alter_category_options_alter_uuidcategory_options.py b/tests/core/migrations/0016_alter_category_options_alter_uuidcategory_options.py new file mode 100644 index 000000000..d126e7867 --- /dev/null +++ b/tests/core/migrations/0016_alter_category_options_alter_uuidcategory_options.py @@ -0,0 +1,20 @@ +# Generated by Django 5.0.4 on 2024-05-18 07:17 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0015_withpositiveintegerfields"), + ] + + operations = [ + migrations.AlterModelOptions( + name="category", + options={"verbose_name_plural": "categories"}, + ), + migrations.AlterModelOptions( + name="uuidcategory", + options={"verbose_name_plural": "UUID categories"}, + ), + ] diff --git a/tests/core/migrations/0017_namedauthor_uuidbook_author.py b/tests/core/migrations/0017_namedauthor_uuidbook_author.py new file mode 100644 index 000000000..a23777962 --- /dev/null +++ b/tests/core/migrations/0017_namedauthor_uuidbook_author.py @@ -0,0 +1,32 @@ +# Generated by Django 5.0.4 on 2024-05-24 04:10 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0016_alter_category_options_alter_uuidcategory_options"), + ] + + operations = [ + migrations.CreateModel( + name="NamedAuthor", + fields=[ + ( + "name", + models.CharField(max_length=256, primary_key=True, serialize=False), + ), + ], + ), + migrations.AddField( + model_name="uuidbook", + name="author", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="core.namedauthor", + ), + ), + ] diff --git a/tests/core/models.py b/tests/core/models.py 
index 97f5d5c39..f9599e4fe 100644 --- a/tests/core/models.py +++ b/tests/core/models.py @@ -1,13 +1,38 @@ import random import string +import uuid from django.core.exceptions import ValidationError from django.db import models +from django.utils import timezone + + +class AuthorManager(models.Manager): + """ + Used to enable the get_by_natural_key method. + NOTE: Manager classes are only required to enable + using the natural key functionality of ForeignKeyWidget + """ + + def get_by_natural_key(self, name): + """ + Django pattern function for finding an author by its name + """ + return self.get(name=name) class Author(models.Model): + objects = AuthorManager() + name = models.CharField(max_length=100) - birthday = models.DateTimeField(auto_now_add=True) + birthday = models.DateTimeField(default=timezone.now) + + def natural_key(self): + """ + Django pattern function for serializing a model by its natural key + Used only by the ForeignKeyWidget using use_natural_foreign_keys. + """ + return (self.name,) def __str__(self): return self.name @@ -18,8 +43,8 @@ def full_clean(self, exclude=None, validate_unique=True): exclude = [] else: exclude = list(exclude) - if 'name' not in exclude and self.name == '123': - raise ValidationError({'name': "'123' is not a valid value"}) + if "name" not in exclude and self.name == "123": + raise ValidationError({"name": "'123' is not a valid value"}) class Category(models.Model): @@ -31,19 +56,47 @@ class Category(models.Model): def __str__(self): return self.name + class Meta: + verbose_name_plural = "categories" + + +class BookManager(models.Manager): + """ + Added to enable get_by_natural_key method + NOTE: Manager classes are only required to enable + using the natural key functionality of ForeignKeyWidget + """ + + def get_by_natural_key(self, name, author): + """ + Django pattern function for returning a book by its natural key + """ + return self.get(name=name, author=Author.objects.get_by_natural_key(author)) + class 
Book(models.Model): - name = models.CharField('Book name', max_length=100) + objects = BookManager() + + name = models.CharField("Book name", max_length=100) author = models.ForeignKey(Author, blank=True, null=True, on_delete=models.CASCADE) - author_email = models.EmailField('Author email', max_length=75, blank=True) + author_email = models.EmailField("Author email", max_length=75, blank=True) imported = models.BooleanField(default=False) - published = models.DateField('Published', blank=True, null=True) - published_time = models.TimeField('Time published', blank=True, null=True) + published = models.DateField("Published", blank=True, null=True) + published_time = models.TimeField("Time published", blank=True, null=True) price = models.DecimalField(max_digits=10, decimal_places=2, null=True, blank=True) added = models.DateTimeField(blank=True, null=True) categories = models.ManyToManyField(Category, blank=True) + def natural_key(self): + """ + Django pattern function for serializing a book by its natural key. + Used only by the ForeignKeyWidget using use_natural_foreign_keys. 
+ """ + return (self.name,) + self.author.natural_key() + + natural_key.dependencies = ["core.Author"] + def __str__(self): return self.name @@ -60,20 +113,20 @@ class Child(models.Model): name = models.CharField(max_length=100) def __str__(self): - return '%s - child of %s' % (self.name, self.parent.name) + return f"{self.name} - child of {self.parent.name}" class Profile(models.Model): - user = models.OneToOneField('auth.User', on_delete=models.CASCADE) + user = models.OneToOneField("auth.User", on_delete=models.CASCADE) is_private = models.BooleanField(default=True) class Entry(models.Model): - user = models.ForeignKey('auth.User', on_delete=models.CASCADE) + user = models.ForeignKey("auth.User", on_delete=models.CASCADE) class Role(models.Model): - user = models.OneToOneField('auth.User', on_delete=models.CASCADE, null=True) + user = models.OneToOneField("auth.User", on_delete=models.CASCADE, null=True) class Person(models.Model): @@ -81,19 +134,16 @@ class Person(models.Model): class WithDefault(models.Model): - name = models.CharField('Default', max_length=75, blank=True, - default='foo_bar') + name = models.CharField("Default", max_length=75, blank=True, default="foo_bar") def random_name(): chars = string.ascii_lowercase - return ''.join(random.SystemRandom().choice(chars) for _ in range(100)) + return "".join(random.SystemRandom().choice(chars) for _ in range(100)) class WithDynamicDefault(models.Model): - - name = models.CharField('Dyn Default', max_length=100, - default=random_name) + name = models.CharField("Dyn Default", max_length=100, default=random_name) class WithFloatField(models.Model): @@ -102,5 +152,42 @@ class WithFloatField(models.Model): class EBook(Book): """Book proxy model to have a separate admin url access and name""" + class Meta: proxy = True + + +class NamedAuthor(models.Model): + """Class with a named primary key""" + + name = models.CharField(max_length=256, primary_key=True) + + +class UUIDCategory(models.Model): + catid = 
models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + name = models.CharField(max_length=32) + + def __str__(self): + return self.name + + class Meta: + verbose_name_plural = "UUID categories" + + +class UUIDBook(models.Model): + """A model which uses a UUID pk (issue 1274)""" + + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + name = models.CharField("Book name", max_length=100) + author = models.ForeignKey( + NamedAuthor, blank=True, null=True, on_delete=models.CASCADE + ) + categories = models.ManyToManyField(UUIDCategory, blank=True) + + def __str__(self): + return self.name + + +class WithPositiveIntegerFields(models.Model): + big = models.PositiveBigIntegerField(null=True) + small = models.PositiveSmallIntegerField(null=True) diff --git a/tests/core/templates/core/admin/change_list.html b/tests/core/templates/core/admin/change_list.html new file mode 100644 index 000000000..985705992 --- /dev/null +++ b/tests/core/templates/core/admin/change_list.html @@ -0,0 +1,9 @@ +{% extends "admin/change_list.html" %} +{% comment %} +A template used for testing customizations to the change_list view (See #1483). 
+{% endcomment %} + +{% block object-tools-items %} + + {{ block.super }} +{% endblock %} diff --git a/tests/core/tests/admin_integration/__init__.py b/tests/core/tests/admin_integration/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/tests/admin_integration/mixins.py b/tests/core/tests/admin_integration/mixins.py new file mode 100644 index 000000000..7cbdb56a3 --- /dev/null +++ b/tests/core/tests/admin_integration/mixins.py @@ -0,0 +1,135 @@ +import os +from datetime import datetime + +from core.admin import BookAdmin +from django.contrib.auth.models import User + +from import_export.formats.base_formats import DEFAULT_FORMATS + + +class AdminTestMixin: + category_change_url = "/admin/core/category/" + category_export_url = "/admin/core/category/export/" + uuid_category_change_url = "/admin/core/uuidcategory/" + uuid_category_export_url = "/admin/core/uuidcategory/export/" + book_import_url = "/admin/core/book/import/" + book_export_url = "/admin/core/book/export/" + ebook_import_url = "/admin/core/ebook/import/" + ebook_export_url = "/admin/core/ebook/export/" + core_book_url = "/admin/core/book/" + process_ebook_import_url = "/admin/core/ebook/process_import/" + book_process_import_url = "/admin/core/book/process_import/" + ebook_process_import_url = "/admin/core/ebook/process_import/" + legacybook_import_url = "/admin/core/legacybook/import/" + legacybook_process_import_url = "/admin/core/legacybook/process_import/" + core_author_url = "/admin/core/author/" + child_import_url = "/admin/core/child/import/" + change_list_url = "admin/import_export/change_list.html" + child_process_import_url = "/admin/core/child/process_import/" + admin_import_template_url = "admin/import_export/import.html" + change_list_template_url = "admin/import_export/change_list_import_export.html" + import_export_import_template_url = "admin/import_export/import.html" + + def setUp(self): + super().setUp() + self.user = User.objects.create_user("admin", 
"admin@example.com", "password") + self.user.is_staff = True + self.user.is_superuser = True + self.user.save() + self.client.login(username="admin", password="password") + + def _do_import_post( + self, + url, + filename, + input_format=0, + encoding=None, + resource=None, + follow=False, + data=None, + ): + input_format = input_format + filename = os.path.join( + os.path.dirname(__file__), + os.path.pardir, + os.path.pardir, + "exports", + filename, + ) + with open(filename, "rb") as f: + if data is None: + data = {} + data.update( + { + "format": str(input_format), + "import_file": f, + } + ) + if encoding: + BookAdmin.from_encoding = encoding + if resource: + data.update({"resource": resource}) + response = self.client.post(url, data, follow=follow) + return response + + def _assert_string_in_response( + self, + url, + filename, + input_format, + encoding=None, + str_in_response=None, + follow=False, + status_code=200, + ): + response = self._do_import_post( + url, filename, input_format, encoding=encoding, follow=follow + ) + self.assertEqual(response.status_code, status_code) + self.assertIn("result", response.context) + self.assertFalse(response.context["result"].has_errors()) + if str_in_response is not None: + self.assertContains(response, str_in_response) + + def _get_input_format_index(self, format): + for i, f in enumerate(DEFAULT_FORMATS): + if f().get_title() == format: + xlsx_index = i + break + else: + raise Exception( + "Unable to find %s format. 
DEFAULT_FORMATS: %r" + % (format, DEFAULT_FORMATS) + ) + return xlsx_index + + def _check_export_file_response( + self, response, target_file_contents, file_prefix="Book" + ): + date_str = datetime.now().strftime("%Y-%m-%d") + self.assertEqual(response.status_code, 200) + self.assertTrue(response.has_header("Content-Disposition")) + self.assertEqual(response["Content-Type"], "text/csv") + self.assertEqual( + response["Content-Disposition"], + f'attachment; filename="{file_prefix}-{date_str}.csv"', + ) + self.assertEqual(target_file_contents.encode(), response.content) + + def _get_url_response( + self, url, expected_status_code=200, str_in_response=None, html=False + ): + response = self.client.get(url) + assert response.status_code == expected_status_code + if str_in_response is not None: + assert str_in_response in response.content.decode() + if html: + assert ( + "text/html" in response.headers["Content-Type"] + ), "Response is not HTML" + return response + + def _post_url_response(self, url, data, expected_status_code=200, follow=False): + response = self.client.post(url, data, follow=follow) + assert response.status_code == expected_status_code + return response diff --git a/tests/core/tests/admin_integration/test_action_export.py b/tests/core/tests/admin_integration/test_action_export.py new file mode 100644 index 000000000..d21b29b30 --- /dev/null +++ b/tests/core/tests/admin_integration/test_action_export.py @@ -0,0 +1,408 @@ +import warnings +from datetime import datetime +from unittest import mock +from unittest.mock import MagicMock, PropertyMock, patch + +from core.admin import CategoryAdmin +from core.models import Book, Category, UUIDCategory +from core.tests.admin_integration.mixins import AdminTestMixin +from django.contrib import admin +from django.contrib.admin import AdminSite +from django.contrib.auth.models import User +from django.core.exceptions import PermissionDenied +from django.http import HttpRequest +from django.test import 
RequestFactory +from django.test.testcases import TestCase +from django.test.utils import override_settings +from django.urls import reverse + +from import_export.admin import ExportMixin + + +class ExportActionAdminIntegrationTest(AdminTestMixin, TestCase): + def setUp(self): + super().setUp() + self.cat1 = Category.objects.create(name="Cat 1") + self.cat2 = Category.objects.create(name="Cat 2") + # fields payload for `CategoryResource` - + # to export using `SelectableFieldsExportForm` + self.resource_fields_payload = { + "categoryresource_id": True, + "categoryresource_name": True, + } + + def _check_export_response(self, response): + self.assertContains(response, self.cat1.name, status_code=200) + self.assertNotContains(response, self.cat2.name, status_code=200) + self.assertTrue(response.has_header("Content-Disposition")) + date_str = datetime.now().strftime("%Y-%m-%d") + self.assertEqual( + response["Content-Disposition"], + f'attachment; filename="Category-{date_str}.csv"', + ) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_ACTION_EXPORT_UI=True) + def test_export_skips_export_ui_page(self): + data = { + "action": ["export_admin_action"], + "_selected_action": [str(self.cat1.id)], + } + response = self._post_url_response(self.category_change_url, data) + self._check_export_response(response) + + def test_export_displays_ui_select_page(self): + data = { + "action": ["export_admin_action"], + "_selected_action": [str(self.cat1.id)], + } + response = self._post_url_response(self.category_change_url, data) + self.assertIn("form", response.context) + export_form = response.context["form"] + data = export_form.initial + self.assertEqual([self.cat1.id], data["export_items"]) + self.assertIn("Export 1 selected item.", response.content.decode()) + + def test_export_displays_ui_select_page_multiple_items(self): + data = { + "action": ["export_admin_action"], + "_selected_action": [str(self.cat1.id), str(self.cat2.id)], + } + response = 
self._post_url_response(self.category_change_url, data) + self.assertIn("form", response.context) + export_form = response.context["form"] + data = export_form.initial + self.assertEqual( + sorted([self.cat1.id, self.cat2.id]), sorted(data["export_items"]) + ) + self.assertIn("Export 2 selected items.", response.content.decode()) + + def test_action_export_model_with_custom_PK(self): + # issue 1800 + cat = UUIDCategory.objects.create(name="UUIDCategory") + data = { + "action": ["export_admin_action"], + "_selected_action": [str(cat.pk)], + } + response = self._post_url_response(self.uuid_category_change_url, data) + self.assertIn("form", response.context) + export_form = response.context["form"] + data = export_form.initial + self.assertEqual([cat.pk], data["export_items"]) + self.assertIn("Export 1 selected item.", response.content.decode()) + + def test_export_post(self): + # create a POST request with data selected from the 'action' export + data = { + "format": "0", + "export_items": [str(self.cat1.id)], + **self.resource_fields_payload, + } + date_str = datetime.now().strftime("%Y-%m-%d") + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + response = self._post_url_response(self.category_export_url, data) + self.assertTrue(response.has_header("Content-Disposition")) + self.assertEqual(response["Content-Type"], "text/csv") + self.assertEqual( + response["Content-Disposition"], + f'attachment; filename="Category-{date_str}.csv"', + ) + target_str = f"id,name\r\n{self.cat1.id},Cat 1\r\n" + self.assertEqual(target_str.encode(), response.content) + + def test_export_admin_action(self): + with mock.patch( + "core.admin.CategoryAdmin.export_admin_action" + ) as mock_export_admin_action: + response = self.client.post( + self.category_change_url, + { + "action": "export_admin_action", + "index": "0", + "selected_across": "0", + "_selected_action": "0", + }, + ) + self.assertTrue(200 <= response.status_code <= 399) + 
mock_export_admin_action.assert_called() + + def test_export_admin_action_with_restricted_pks(self): + data = { + "format": "0", + "export_items": [str(self.cat1.id)], + **self.resource_fields_payload, + } + # mock returning a set of pks which is not in the submitted range + with mock.patch( + "import_export.admin.ExportMixin.get_valid_export_item_pks" + ) as mock_valid_pks: + mock_valid_pks.return_value = [999] + response = self._post_url_response(self.category_export_url, data) + self.assertIn( + "Select a valid choice. " + f"{self.cat1.id} is not one of the available choices.", + response.content.decode(), + ) + + def test_export_admin_action_with_restricted_pks_deprecated(self): + data = { + "format": "0", + "export_items": [str(self.cat1.id)], + **self.resource_fields_payload, + } + with self.assertWarnsRegex( + DeprecationWarning, + r"The 'get_valid_export_item_pks\(\)' method in " + "core.admin.CategoryAdmin is deprecated and will be removed " + "in a future release", + ): + self._post_url_response(self.category_export_url, data) + + def _perform_export_action_calls_modeladmin_get_queryset_test(self, data): + # Issue #1864 + # ModelAdmin's get_queryset should be used in the ModelAdmin mixins + with ( + mock.patch( + "core.admin.CategoryAdmin.get_queryset" + ) as mock_modeladmin_get_queryset, + mock.patch( + "import_export.admin.ExportMixin.get_data_for_export" + ) as mock_get_data_for_export, + ): + mock_queryset = mock.MagicMock(name="MockQuerySet") + mock_queryset.filter.return_value = mock_queryset + mock_queryset.order_by.return_value = mock_queryset + + mock_modeladmin_get_queryset.return_value = mock_queryset + + self._post_url_response(self.category_export_url, data) + + mock_modeladmin_get_queryset.assert_called() + mock_get_data_for_export.assert_called() + + args, kwargs = mock_get_data_for_export.call_args + mock_get_data_for_export.assert_called_with( + args[0], mock_queryset, **kwargs + ) + + def 
test_export_action_calls_modeladmin_get_queryset(self): + # Issue #1864 + # Test with specific export items + + data = { + "format": "0", + "export_items": [str(self.cat1.id)], + **self.resource_fields_payload, + } + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + self._perform_export_action_calls_modeladmin_get_queryset_test(data) + + def test_export_action_calls_modeladmin_get_queryset_all_items(self): + # Issue #1864 + # Test without specific export items + + data = { + "format": "0", + **self.resource_fields_payload, + } + self._perform_export_action_calls_modeladmin_get_queryset_test(data) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_EXPORT_UI=True) + def test_export_action_calls_modeladmin_get_queryset_skip_export_ui(self): + # Issue #1864 + # Test with specific export items and skip UI + + data = { + "format": "0", + "export_items": [str(self.cat1.id)], + **self.resource_fields_payload, + } + self._perform_export_action_calls_modeladmin_get_queryset_test(data) + + def test_get_export_data_raises_PermissionDenied_when_no_export_permission_assigned( + self, + ): + request = MagicMock(spec=HttpRequest) + + class TestMixin(ExportMixin): + model = Book + + def has_export_permission(self, request): + return False + + m = TestMixin() + with self.assertRaises(PermissionDenied): + m.get_export_data("0", request, Book.objects.none()) + + +class TestExportButtonOnChangeForm(AdminTestMixin, TestCase): + def setUp(self): + super().setUp() + self.cat1 = Category.objects.create(name="Cat 1") + self.change_url = reverse( + "%s:%s_%s_change" + % ( + "admin", + "core", + "category", + ), + args=[self.cat1.id], + ) + self.target_str = ( + '' + ) + + def test_export_button_on_change_form(self): + self._get_url_response(self.change_url, str_in_response=self.target_str) + response = self._post_url_response( + self.change_url, data={"_export-item": "Export", "name": self.cat1.name} + ) + self.assertIn("Export 1 selected item", 
response.content.decode()) + + def test_export_button_on_change_form_for_custom_pk(self): + self.cat1 = UUIDCategory.objects.create(name="Cat 1") + self.change_url = reverse( + "%s:%s_%s_change" + % ( + "admin", + "core", + "uuidcategory", + ), + args=[self.cat1.pk], + ) + response = self.client.get(self.change_url) + self.assertIn(self.target_str, response.content.decode()) + response = self._post_url_response( + self.change_url, data={"_export-item": "Export", "name": self.cat1.name} + ) + self.assertIn("Export 1 selected item", response.content.decode()) + + def test_save_button_on_change_form(self): + # test default behavior is retained when saving an instance ChangeForm + response = self._post_url_response( + self.change_url, data={"_save": "Save", "name": self.cat1.name}, follow=True + ) + target_str = f"The category.*{self.cat1.name}.*was changed successfully." + self.assertRegex(response.content.decode(), target_str) + + def test_export_button_on_change_form_disabled(self): + class MockCategoryAdmin(CategoryAdmin): + show_change_form_export = True + + factory = RequestFactory() + category_admin = MockCategoryAdmin(Category, admin.site) + + request = factory.get(self.change_url) + request.user = self.user + + response = category_admin.change_view(request, str(self.cat1.id)) + response.render() + + self.assertIn(self.target_str, response.content.decode()) + + category_admin.show_change_form_export = False + response = category_admin.change_view(request, str(self.cat1.id)) + response.render() + self.assertNotIn(self.target_str, response.content.decode()) + + +class TestSkipExportFormFromAction(AdminTestMixin, TestCase): + """ + Test config values when export is initiated from the 'Export' action in the action + menu. 
+ """ + + def setUp(self): + super().setUp() + self.cat1 = Category.objects.create(name="Cat 1") + self.queryset = Category.objects.all() + self.model_admin = CategoryAdmin(Category, AdminSite()) + + factory = RequestFactory() + data = { + "action": ["export_admin_action"], + "_selected_action": [str(self.cat1.id)], + } + self.request = factory.post(self.category_change_url, data=data) + self.request.user = User.objects.create_user("admin1") + + def test_skip_export_form_from_action_enabled(self): + self.model_admin.skip_export_form_from_action = True + response = self.model_admin.export_admin_action(self.request, self.queryset) + target_file_contents = "id,name\r\n" f"{self.cat1.id},Cat 1\r\n" + self.assertEqual(target_file_contents.encode(), response.content) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_ACTION_EXPORT_UI=True) + def test_skip_export_form_from_action_setting_enabled(self): + response = self.model_admin.export_admin_action(self.request, self.queryset) + target_file_contents = "id,name\r\n" f"{self.cat1.id},Cat 1\r\n" + self.assertEqual(target_file_contents.encode(), response.content) + + +class TestSkipExportFormFromChangeForm(AdminTestMixin, TestCase): + """ + Test config values when export is initiated from the 'Export' button on the Change + form. 
+ """ + + def setUp(self): + super().setUp() + self.cat1 = Category.objects.create(name="Cat 1") + self.queryset = Category.objects.all() + self.model_admin = CategoryAdmin(Category, AdminSite()) + + self.change_url = reverse( + "%s:%s_%s_change" + % ( + "admin", + "core", + "category", + ), + args=[self.cat1.id], + ) + factory = RequestFactory() + self.request = factory.post( + self.change_url, data={"_export-item": "Export", "name": self.cat1.name} + ) + self.request.user = User.objects.create_user("admin1") + + def test_export_button_on_change_form_skip_export_form_from_action_enabled(self): + self.model_admin.skip_export_form_from_action = True + response = self.model_admin.export_admin_action(self.request, self.queryset) + target_file_contents = "id,name\r\n" f"{self.cat1.id},Cat 1\r\n" + self.assertEqual(target_file_contents.encode(), response.content) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_ACTION_EXPORT_UI=True) + def test_export_button_on_change_form_skip_export_form_from_action_setting_enabled( + self, + ): + self.model_admin.skip_export_form_from_action = True + response = self.model_admin.export_admin_action(self.request, self.queryset) + target_file_contents = "id,name\r\n" f"{self.cat1.id},Cat 1\r\n" + self.assertEqual(target_file_contents.encode(), response.content) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_EXPORT_UI=True) + def test_export_button_on_change_form_skip_export_setting_enabled(self): + # this property has no effect - IMPORT_EXPORT_SKIP_ADMIN_ACTION_EXPORT_UI + # should be set instead + response = self._post_url_response( + self.change_url, data={"_export-item": "Export", "name": self.cat1.name} + ) + target_re = r"This exporter will export the following fields:" + self.assertRegex(response.content.decode(), target_re) + + def test_export_button_on_change_form_skip_export_form_enabled(self): + # this property has no effect - skip_export_form_from_action + # should be set instead + with patch( + 
"core.admin.CategoryAdmin.skip_export_form", + new_callable=PropertyMock, + return_value=True, + ): + response = self._post_url_response( + self.change_url, data={"_export-item": "Export", "name": self.cat1.name} + ) + target_re = r"This exporter will export the following fields:" + self.assertRegex(response.content.decode(), target_re) diff --git a/tests/core/tests/admin_integration/test_export.py b/tests/core/tests/admin_integration/test_export.py new file mode 100644 index 000000000..e294d1164 --- /dev/null +++ b/tests/core/tests/admin_integration/test_export.py @@ -0,0 +1,947 @@ +from datetime import date, datetime +from io import BytesIO +from unittest import mock +from unittest.mock import MagicMock, PropertyMock, patch +from zoneinfo import ZoneInfo + +import chardet +import tablib +from core.admin import BookAdmin, BookResource, EBookResource +from core.models import Author, Book, EBook, UUIDCategory +from core.tests.admin_integration.mixins import AdminTestMixin +from core.tests.utils import ignore_utcnow_deprecation_warning +from django import forms +from django.contrib.admin.sites import AdminSite +from django.contrib.admin.views.main import ChangeList +from django.contrib.auth.models import User +from django.core.exceptions import FieldError +from django.http import HttpRequest +from django.test import RequestFactory +from django.test.testcases import TestCase +from django.test.utils import override_settings +from openpyxl.reader.excel import load_workbook +from tablib import Dataset + +from import_export import fields, formats, resources, widgets +from import_export.admin import ExportActionMixin, ExportMixin +from import_export.fields import Field +from import_export.formats.base_formats import XLSX +from import_export.resources import ModelResource + + +class ExportAdminIntegrationTest(AdminTestMixin, TestCase): + def setUp(self) -> None: + super().setUp() + self.bookresource_export_fields_payload = { + "bookresource_id": True, + "bookresource_name": 
True, + "bookresource_author_email": True, + "bookresource_categories": True, + } + + def test_export(self): + response = self._get_url_response(self.book_export_url) + self.assertNotIn("Export 0 selected items.", response.content.decode()) + form = response.context["form"] + self.assertEqual(2, len(form.fields["resource"].choices)) + + data = {"format": "0", **self.bookresource_export_fields_payload} + date_str = datetime.now().strftime("%Y-%m-%d") + # Should not contain COUNT queries from ModelAdmin.get_results() + with self.assertNumQueries(5): + response = self._post_url_response(self.book_export_url, data) + self.assertTrue(response.has_header("Content-Disposition")) + self.assertEqual(response["Content-Type"], "text/csv") + self.assertEqual( + response["Content-Disposition"], + f'attachment; filename="Book-{date_str}.csv"', + ) + self.assertEqual( + b"id,name,author_email,categories\r\n", + response.content, + ) + + def test_export_with_skip_export_form_from_action(self): + # setting should have no effect + with patch( + "core.admin.BookAdmin.skip_export_form_from_action", + new_callable=PropertyMock, + return_value=True, + ): + response = self._get_url_response(self.book_export_url) + target_re = r"This exporter will export the following fields:" + self.assertRegex(response.content.decode(), target_re) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_ACTION_EXPORT_UI=True) + def test_export_with_skip_export_form_from_action_setting(self): + # setting should have no effect + response = self._get_url_response(self.book_export_url) + target_re = r"This exporter will export the following fields:" + self.assertRegex(response.content.decode(), target_re) + + @mock.patch("core.admin.BookAdmin.get_export_resource_kwargs") + def test_export_passes_export_resource_kwargs( + self, mock_get_export_resource_kwargs + ): + # issue 1738 + mock_get_export_resource_kwargs.return_value = {"a": 1} + self._get_url_response(self.book_export_url) + self.assertEqual(2, 
mock_get_export_resource_kwargs.call_count) + + def book_resource_init(self, **kwargs): + # stub call to the resource constructor + pass + + @mock.patch.object(BookResource, "__init__", book_resource_init) + def test_export_passes_no_resource_constructor_params(self): + # issue 1716 + # assert that the export call with a no-arg constructor + # does not crash + self._get_url_response(self.book_export_url) + + def test_get_export_queryset(self): + model_admin = BookAdmin(Book, AdminSite()) + + factory = RequestFactory() + request = factory.get(self.book_export_url) + request.user = User.objects.create_user("admin1") + + call_number = 0 + + class MyChangeList(ChangeList): + def get_queryset(self, request): + nonlocal call_number + call_number += 1 + return super().get_queryset(request) + + model_admin.get_changelist = lambda request: MyChangeList + + with patch.object(model_admin, "get_paginator") as mock_get_paginator: + with self.assertNumQueries(4): + queryset = model_admin.get_export_queryset(request) + + mock_get_paginator.assert_not_called() + self.assertEqual(call_number, 1) + + self.assertEqual(queryset.count(), Book.objects.count()) + + def test_get_export_queryset_no_queryset_init(self): + """Test if user has own ChangeList which doesn't store queryset during init""" + model_admin = BookAdmin(Book, AdminSite()) + + factory = RequestFactory() + request = factory.get(self.book_export_url) + request.user = User.objects.create_user("admin1") + + call_number = 0 + + class MyChangeList(ChangeList): + def __init__(self, *args, **kwargs): + self.filter_params = {} + self.model_admin = kwargs.pop("model_admin") + self.list_filter = kwargs.pop("list_filter") + self.model = kwargs.pop("model") + self.date_hierarchy = kwargs.pop("date_hierarchy") + self.root_queryset = self.model_admin.get_queryset(request) + self.list_select_related = kwargs.pop("list_select_related") + self.list_display = kwargs.pop("list_display") + self.lookup_opts = self.model._meta + self.params = 
{} + self.query = "" + + def get_queryset(self, request): + nonlocal call_number + call_number += 1 + return super().get_queryset(request) + + model_admin.get_changelist = lambda request: MyChangeList + + with patch.object(model_admin, "get_paginator") as mock_get_paginator: + with self.assertNumQueries(4): + queryset = model_admin.get_export_queryset(request) + + mock_get_paginator.assert_not_called() + self.assertEqual(call_number, 1) + + self.assertEqual(queryset.count(), Book.objects.count()) + + def test_get_export_form_single_resource(self): + response = self._get_url_response(self.category_export_url) + content = response.content.decode() + self.assertNotIn("Export 0 selected items.", content) + form = response.context["form"] + self.assertIsInstance(form.fields["resource"].widget, forms.HiddenInput) + self.assertEqual(form.initial["resource"], "0") + + def test_get_export_FieldError(self): + # issue 1723 + with mock.patch("import_export.resources.Resource.export") as mock_export: + mock_export.side_effect = FieldError("some unknown error") + data = { + "format": "0", + "resource": 1, + "booknameresource_id": True, + "booknameresource_name": True, + } + response = self._post_url_response(self.book_export_url, data) + target_msg = "Some unknown error" + self.assertIn(target_msg, response.content.decode()) + + def test_export_second_resource(self): + self._get_url_response( + self.book_export_url, str_in_response="Export/Import only book names" + ) + + data = { + "format": "0", + "resource": 1, + # Second resource is `BookNameResource` + "booknameresource_id": True, + "booknameresource_name": True, + } + date_str = datetime.now().strftime("%Y-%m-%d") + response = self._post_url_response(self.book_export_url, data) + self.assertTrue(response.has_header("Content-Disposition")) + self.assertEqual(response["Content-Type"], "text/csv") + self.assertEqual( + response["Content-Disposition"], + f'attachment; filename="Book-{date_str}.csv"', + ) + 
self.assertEqual(b"id,name\r\n", response.content) + + def test_export_displays_resources_fields(self): + response = self._get_url_response(self.book_export_url) + self.assertEqual( + response.context["fields_list"], + [ + ( + "BookResource", + [ + "id", + "name", + "author", + "author_email", + "imported", + "published", + "published_time", + "price", + "added", + "categories", + ], + ), + ("Export/Import only book names", ["id", "name"]), + ], + ) + + @override_settings(EXPORT_FORMATS=[XLSX]) + def test_get_export_form_single_format(self): + response = self._get_url_response(self.category_export_url) + form = response.context["form"] + self.assertEqual(1, len(form.fields["format"].choices)) + self.assertTrue(form.fields["format"].widget.attrs["readonly"]) + content = response.content.decode() + self.assertIn("xlsx", content) + self.assertNotIn('select name="format"', content) + + @override_settings(EXPORT_FORMATS=[]) + def test_export_empty_export_formats(self): + with self.assertRaisesRegex(ValueError, "invalid formats list"): + self._get_url_response(self.category_export_url) + + def test_returns_xlsx_export(self): + response = self._get_url_response(self.book_export_url) + self.assertEqual(response.status_code, 200) + + xlsx_index = self._get_input_format_index("xlsx") + data = {"format": str(xlsx_index), **self.bookresource_export_fields_payload} + response = self._post_url_response(self.book_export_url, data) + self.assertTrue(response.has_header("Content-Disposition")) + self.assertEqual( + response["Content-Type"], + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + ) + + @ignore_utcnow_deprecation_warning + @override_settings(IMPORT_EXPORT_ESCAPE_FORMULAE_ON_EXPORT=True) + def test_export_escape_formulae(self): + Book.objects.create(id=1, name="=SUM(1+1)") + Book.objects.create(id=2, name="") + self._get_url_response(self.book_export_url) + + xlsx_index = self._get_input_format_index("xlsx") + data = {"format": str(xlsx_index), 
**self.bookresource_export_fields_payload} + response = self._post_url_response(self.book_export_url, data) + content = response.content + wb = load_workbook(filename=BytesIO(content)) + self.assertEqual("", wb.active["B2"].value) + self.assertEqual("SUM(1+1)", wb.active["B3"].value) + + @override_settings(IMPORT_EXPORT_ESCAPE_FORMULAE_ON_EXPORT=True) + def test_export_escape_formulae_csv(self): + b1 = Book.objects.create(id=1, name="=SUM(1+1)") + self._get_url_response(self.book_export_url) + + index = self._get_input_format_index("csv") + data = { + "format": str(index), + "bookresource_id": True, + "bookresource_name": True, + } + response = self._post_url_response(self.book_export_url, data) + self.assertIn( + f"{b1.id},SUM(1+1)\r\n".encode(), + response.content, + ) + + @override_settings(IMPORT_EXPORT_ESCAPE_FORMULAE_ON_EXPORT=False) + def test_export_escape_formulae_csv_false(self): + b1 = Book.objects.create(id=1, name="=SUM(1+1)") + self._get_url_response(self.book_export_url) + + index = self._get_input_format_index("csv") + data = { + "format": str(index), + "bookresource_id": True, + "bookresource_name": True, + } + response = self._post_url_response(self.book_export_url, data) + self.assertIn( + f"{b1.id},=SUM(1+1)\r\n".encode(), + response.content, + ) + + def test_export_model_with_custom_PK(self): + # issue 1800 + UUIDCategory.objects.create(name="UUIDCategory") + response = self._get_url_response(self.uuid_category_export_url) + form = response.context["form"] + self.assertEqual( + form.fields["resource"].choices, + [(0, "UUIDCategoryResource")], + ) + + def test_export_get(self): + """ + Test export view get method. 
+ Test that field checkboxes are displayed with names as discussed under #1846 + """ + response = self._get_url_response(self.ebook_export_url) + self.assertContains( + response, + '", + html=True, + ) + self.assertContains( + response, + '', + html=True, + ) + + def test_export_with_custom_field(self): + # issue 1808 + a = Author.objects.create(id=11, name="Ian Fleming") + data = { + "format": "0", + "author": a.id, + "resource": "", + "ebookresource_id": True, + "ebookresource_author_email": True, + "ebookresource_name": True, + "ebookresource_published": True, + } + date_str = datetime.now().strftime("%Y-%m-%d") + response = self._post_url_response(self.ebook_export_url, data) + self.assertTrue(response.has_header("Content-Disposition")) + self.assertEqual(response["Content-Type"], "text/csv") + self.assertEqual( + response["Content-Disposition"], + f'attachment; filename="EBook-{date_str}.csv"', + ) + self.assertEqual( + b"id,Email of the author,name,published_date\r\n", response.content + ) + + +class FilteredExportAdminIntegrationTest(AdminTestMixin, TestCase): + fixtures = ["category", "book", "author"] + + def test_export_filters_by_form_param(self): + # issue 1578 + author = Author.objects.get(name="Ian Fleming") + + data = { + "format": "0", + "author": str(author.id), + "ebookresource_id": True, + "ebookresource_author_email": True, + "ebookresource_name": True, + "ebookresource_published": True, + } + date_str = datetime.now().strftime("%Y-%m-%d") + response = self._post_url_response(self.ebook_export_url, data) + self.assertTrue(response.has_header("Content-Disposition")) + self.assertEqual(response["Content-Type"], "text/csv") + self.assertEqual( + response["Content-Disposition"], + f'attachment; filename="EBook-{date_str}.csv"', + ) + self.assertEqual( + b"id,Email of the author,name,published_date\r\n" + b"5,ian@example.com,The Man with the Golden Gun,1965-04-01\r\n", + response.content, + ) + + +class TestExportEncoding(TestCase): + mock_request = 
MagicMock(spec=HttpRequest) + mock_request.POST = {"format": 0, "bookresource_id": True} + + class TestMixin(ExportMixin): + model = Book + + def __init__(self, test_str=None): + self.test_str = test_str + + def get_data_for_export(self, request, queryset, **kwargs): + dataset = Dataset(headers=["id", "name"]) + dataset.append([1, self.test_str]) + return dataset + + def get_export_queryset(self, request): + return [] + + def get_export_filename(self, request, queryset, file_format): + return "f" + + def setUp(self): + self.file_format = formats.base_formats.CSV() + self.export_mixin = self.TestMixin(test_str="teststr") + + def test_to_encoding_not_set_default_encoding_is_utf8(self): + self.export_mixin = self.TestMixin(test_str="teststr") + data = self.export_mixin.get_export_data( + self.file_format, self.mock_request, [] + ) + csv_dataset = tablib.import_set(data) + self.assertEqual("teststr", csv_dataset.dict[0]["name"]) + + def test_to_encoding_set(self): + self.export_mixin = self.TestMixin(test_str="ハローワールド") + data = self.export_mixin.get_export_data( + self.file_format, self.mock_request, [], encoding="shift-jis" + ) + encoding = chardet.detect(bytes(data))["encoding"] + self.assertEqual("SHIFT_JIS", encoding) + + def test_to_encoding_set_incorrect(self): + self.export_mixin = self.TestMixin() + with self.assertRaises(LookupError): + self.export_mixin.get_export_data( + self.file_format, + self.mock_request, + [], + encoding="bad-encoding", + ) + + @ignore_utcnow_deprecation_warning + def test_to_encoding_not_set_for_binary_file(self): + self.export_mixin = self.TestMixin(test_str="teststr") + self.file_format = formats.base_formats.XLSX() + data = self.export_mixin.get_export_data( + self.file_format, + self.mock_request, + [], + ) + binary_dataset = tablib.import_set(data) + self.assertEqual("teststr", binary_dataset.dict[0]["name"]) + + def test_export_action_to_encoding(self): + self.export_mixin.to_encoding = "utf-8" + with mock.patch( + 
"import_export.admin.ExportMixin.get_export_data" + ) as mock_get_export_data: + self.export_mixin.export_action(self.mock_request) + encoding_kwarg = mock_get_export_data.call_args_list[0][1]["encoding"] + self.assertEqual("utf-8", encoding_kwarg) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_ACTION_EXPORT_UI=True) + def test_export_admin_action_to_encoding(self): + class TestExportActionMixin(ExportActionMixin): + def get_export_filename(self, request, queryset, file_format): + return "f" + + self.export_mixin = TestExportActionMixin() + self.export_mixin.to_encoding = "utf-8" + with mock.patch( + "import_export.admin.ExportMixin.get_export_data" + ) as mock_get_export_data: + self.export_mixin.export_admin_action(self.mock_request, []) + encoding_kwarg = mock_get_export_data.call_args_list[0][1]["encoding"] + self.assertEqual("utf-8", encoding_kwarg) + + +class TestSelectableFieldsExportPage(AdminTestMixin, TestCase): + def test_selectable_fields_rendered_with_resource_index_attribute(self) -> None: + response = self._get_url_response(self.book_export_url) + form_resources = response.context["form"].resources + content = response.content.decode() + for index, resource in enumerate(form_resources): + resource_fields = resource().get_export_order() + self.assertEqual( + content.count(f'resource-index="{index}"'), + len(resource_fields), + ) + + +class CustomColumnNameExportTest(AdminTestMixin, TestCase): + """Test export ok when column name is defined in fields list (issue 1828).""" + + def setUp(self): + super().setUp() + self.author = Author.objects.create(id=11, name="Ian Fleming") + self.book = Book.objects.create( + name="Moonraker", author=self.author, published=date(1955, 4, 5) + ) + self.orig_fields = EBookResource._meta.fields + EBookResource._meta.fields = ( + "id", + "author_email", + "name", + "published_date", + "auteur_name", + ) + + def tearDown(self): + super().tearDown() + EBookResource._meta.fields = self.orig_fields + + def 
test_export_with_custom_field(self): + data = { + "format": "0", + "author": self.author.id, + "resource": "", + "ebookresource_id": True, + "ebookresource_author_email": True, + "ebookresource_name": True, + "ebookresource_published_date": True, + } + date_str = datetime.now().strftime("%Y-%m-%d") + response = self._post_url_response(self.ebook_export_url, data) + self.assertTrue(response.has_header("Content-Disposition")) + self.assertEqual(response["Content-Type"], "text/csv") + self.assertEqual( + response["Content-Disposition"], + f'attachment; filename="EBook-{date_str}.csv"', + ) + s = ( + "id,Email of the author,name,published_date\r\n" + f"{self.book.id},,Moonraker,1955-04-05\r\n" + ) + self.assertEqual(s, response.content.decode()) + + def test_export_with_custom_name(self): + # issue 1893 + data = { + "format": "0", + "author": self.author.id, + "resource": "", + "ebookresource_id": True, + "ebookresource_author_email": True, + "ebookresource_name": True, + "ebookresource_published_date": True, + "ebookresource_auteur_name": True, + } + response = self._post_url_response(self.ebook_export_url, data) + s = ( + "id,Email of the author,name,published_date,Author Name\r\n" + f"{self.book.id},,Moonraker,1955-04-05,Ian Fleming\r\n" + ) + self.assertEqual(s, response.content.decode()) + + +class DeclaredFieldWithAttributeExportTest(AdminTestMixin, TestCase): + """ + If a custom field is declared, export should work + even if no `fields` declaration is present. 
+ (issue 1953) + """ + + class _BookResource(ModelResource): + name = Field(attribute="author__name", column_name="Author Name") + + class Meta: + model = Book + + def setUp(self): + super().setUp() + self.author = Author.objects.create(id=11, name="Ian Fleming") + self.book = Book.objects.create( + name="Moonraker", author=self.author, published=date(1955, 4, 5) + ) + + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_export_with_declared_author_name_field( + self, mock_choose_export_resource_class + ): + mock_choose_export_resource_class.return_value = self._BookResource + data = { + "format": "0", + "resource": "0", + "bookresource_name": True, + } + response = self._post_url_response(self.book_export_url, data) + s = "Author Name\r\nIan Fleming\r\n" + self.assertEqual(s, response.content.decode()) + + +class DeclaredFieldWithAttributeAndFieldsExportTest(AdminTestMixin, TestCase): + """ + If a custom field is declared, export should work + when `fields` declaration is present. 
+ (issue 1953) + """ + + class _BookResource(ModelResource): + name = Field(attribute="author__name", column_name="Author Name") + + class Meta: + fields = ("name",) + model = Book + + def setUp(self): + super().setUp() + self.author = Author.objects.create(id=11, name="Ian Fleming") + self.book = Book.objects.create( + name="Moonraker", author=self.author, published=date(1955, 4, 5) + ) + + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_export_with_declared_author_name_field( + self, mock_choose_export_resource_class + ): + mock_choose_export_resource_class.return_value = self._BookResource + data = { + "format": "0", + "resource": "0", + "bookresource_name": True, + } + response = self._post_url_response(self.book_export_url, data) + s = "Author Name\r\nIan Fleming\r\n" + self.assertEqual(s, response.content.decode()) + + +class DeclaredFieldWithNoAttributeExportTest(AdminTestMixin, TestCase): + """ + If a custom field is declared with no attribute the field will be present + but with an empty string. + """ + + class _BookResource(ModelResource): + author_email = Field(column_name="Author Email") + + class Meta: + model = Book + + def setUp(self): + super().setUp() + self.book = Book.objects.create( + name="Moonraker", author_email="ian@fleming.com" + ) + + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_export_with_declared_author_email_field( + self, mock_choose_export_resource_class + ): + mock_choose_export_resource_class.return_value = self._BookResource + data = {"format": "0", "resource": "0", "bookresource_author_email": True} + response = self._post_url_response(self.book_export_url, data) + s = 'Author Email\r\n""\r\n' + self.assertEqual(s, response.content.decode()) + + +class DeclaredFieldWithIncorrectNameInFieldsExportTest(AdminTestMixin, TestCase): + """ + If a custom field is declared with no attribute the process should not crash + if that field is not in `fields`. 
+ issue #1959 + """ + + def setUp(self): + super().setUp() + self.author = Author.objects.create(id=11, name="Ian Fleming") + self.book = Book.objects.create( + name="Moonraker", author_email="ian@fleming.com", author=self.author + ) + self.orig_fields = EBookResource._meta.fields + EBookResource._meta.fields = ("a",) + + def tearDown(self): + super().tearDown() + EBookResource._meta.fields = self.orig_fields + + def test_export_with_declared_author_email_field(self): + data = { + "format": "0", + "resource": "0", + "ebookresource_id": True, + "ebookresource_a": True, + "author": self.author.id, + } + with self.assertWarns(UserWarning) as w: + response = self._post_url_response(self.ebook_export_url, data) + self.assertEqual( + "cannot identify field for export with name 'a'", + str(w.warnings[-1].message), + ) + s = f"id\r\n{self.book.id}\r\n" + self.assertEqual(s, response.content.decode()) + + +class FilteredExportTest(AdminTestMixin, TestCase): + """ + Tests that exports can be filtered by a custom form field. + This process is demonstrated in the documentation. 
+ """ + + def test_filtered_export(self): + a1 = Author.objects.create(id=11, name="Ian Fleming") + a2 = Author.objects.create(id=12, name="James Joyce") + b1 = Book.objects.create(name="Moonraker", author=a1) + b2 = Book.objects.create(name="Ulysses", author=a2) + self._get_url_response(self.ebook_export_url) + data = { + "format": "0", + "author": a1.id, + "resource": "", + "ebookresource_id": True, + "ebookresource_name": True, + } + response = self._post_url_response(self.ebook_export_url, data) + s = "id,name\r\n" f"{b1.id},Moonraker\r\n" + self.assertEqual(s.encode(), response.content) + + data["author"] = a2.id + response = self._post_url_response(self.ebook_export_url, data) + s = "id,name\r\n" f"{b2.id},Ulysses\r\n" + self.assertEqual(s.encode(), response.content) + + +class SkipExportFormResourceConfigTest(AdminTestMixin, TestCase): + def setUp(self): + super().setUp() + self.model_admin = BookAdmin(EBook, AdminSite()) + + book = Book.objects.create(name="Moonraker", published=date(1955, 4, 5)) + self.target_file_contents = ( + "id,name,author,author_email,imported,published," + "published_time,price,added,categories\r\n" + f"{book.id},Moonraker,,,0,1955-04-05,,,,\r\n" + ) + + factory = RequestFactory() + self.request = factory.get(self.book_export_url, follow=True) + self.request.user = User.objects.create_user("admin1") + + def test_export_skips_export_form(self): + self.model_admin.skip_export_form = True + response = self.model_admin.export_action(self.request) + self._check_export_file_response( + response, self.target_file_contents, file_prefix="EBook" + ) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_EXPORT_UI=True) + def test_export_skips_export_form_setting_enabled(self): + response = self.model_admin.export_action(self.request) + self._check_export_file_response( + response, self.target_file_contents, file_prefix="EBook" + ) + + +class ExportBinaryFieldsTest(AdminTestMixin, TestCase): + # Test that Dates, Booleans, numbers etc are retained as 
native types + # when exporting to XLSX, XLS, ODS (see #1939) + + class DeclaredModelFieldBookResource(resources.ModelResource): + # declare a field and enforce export output as str (coerce_to_string) + id = fields.Field( + attribute="id", + widget=widgets.NumberWidget(coerce_to_string=True), + ) + imported = fields.Field( + attribute="imported", + widget=widgets.BooleanWidget(coerce_to_string=True), + ) + published = fields.Field( + attribute="published", + widget=widgets.DateWidget("%d.%m.%Y", coerce_to_string=True), + ) + + class Meta: + model = Book + export_order = ("id", "imported", "published") + + def test_dynamic_type_export(self): + Book.objects.create(id=101, published=datetime(2010, 8, 2), imported=True) + data = { + "format": "2", + "bookresource_id": True, + "bookresource_imported": True, + "bookresource_published": True, + } + response = self.client.post(self.book_export_url, data) + self.assertEqual(response.status_code, 200) + content = response.content + wb = load_workbook(filename=BytesIO(content)) + self.assertEqual(101, wb.active["A2"].value) + self.assertEqual(True, wb.active["B2"].value) + self.assertEqual(datetime(2010, 8, 2), wb.active["C2"].value) + + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_dynamic_export_with_custom_resource( + self, mock_choose_export_resource_class + ): + # Test that `coerce_to_string` is ignored + mock_choose_export_resource_class.return_value = ( + self.DeclaredModelFieldBookResource + ) + Book.objects.create(id=101, published=date(2000, 8, 2), imported=True) + data = { + "format": "2", + "bookresource_id": True, + "bookresource_imported": True, + "bookresource_published": True, + } + response = self.client.post(self.book_export_url, data) + self.assertEqual(response.status_code, 200) + content = response.content + wb = load_workbook(filename=BytesIO(content)) + self.assertEqual(101, wb.active["A2"].value) + self.assertEqual(1, wb.active["B2"].value) + 
self.assertEqual(datetime(2000, 8, 2), wb.active["C2"].value) + + +@override_settings(USE_TZ=True, TIME_ZONE="UTC") +class ExportTzAwareDateTest(AdminTestMixin, TestCase): + # issue 1995 + # test that tz aware dates do not crash on export + class BookResource_(resources.ModelResource): + + class Meta: + model = Book + fields = ("id", "name", "added") + + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_datetime_export_xlsx(self, mock_choose_export_resource_class): + mock_choose_export_resource_class.return_value = self.BookResource_ + date_added = datetime(2024, 11, 8, 14, 40, tzinfo=ZoneInfo("UTC")) + Book.objects.create(id=101, name="Moonraker", added=date_added) + + data = { + "format": "2", + "bookresource_id": True, + "bookresource_added": True, + } + response = self.client.post(self.book_export_url, data) + self.assertEqual(response.status_code, 200) + content = response.content + wb = load_workbook(filename=BytesIO(content)) + self.assertEqual(date_added.replace(tzinfo=None), wb.active["B2"].value) + + @override_settings(TIME_ZONE="Asia/Hong_Kong") + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_datetime_export_xlsx_with_timezone( + self, mock_choose_export_resource_class + ): + mock_choose_export_resource_class.return_value = self.BookResource_ + date_added = datetime(2024, 11, 8, 14, 40, tzinfo=ZoneInfo("Asia/Hong_Kong")) + Book.objects.create(id=101, name="Moonraker", added=date_added) + + data = { + "format": "2", + "bookresource_id": True, + "bookresource_added": True, + } + response = self.client.post(self.book_export_url, data) + self.assertEqual(response.status_code, 200) + content = response.content + wb = load_workbook(filename=BytesIO(content)) + self.assertEqual(date_added.replace(tzinfo=None), wb.active["B2"].value) + + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_datetime_export_xls(self, mock_choose_export_resource_class): + 
mock_choose_export_resource_class.return_value = self.BookResource_ + date_added = datetime(2024, 11, 8, 14, 40, tzinfo=ZoneInfo("UTC")) + Book.objects.create(id=101, name="Moonraker", added=date_added) + + data = { + "format": "1", + "bookresource_id": True, + "bookresource_added": True, + } + response = self.client.post(self.book_export_url, data) + self.assertEqual(response.status_code, 200) + + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_datetime_export_ods(self, mock_choose_export_resource_class): + mock_choose_export_resource_class.return_value = self.BookResource_ + date_added = datetime(2024, 11, 8, 14, 40, tzinfo=ZoneInfo("UTC")) + Book.objects.create(id=101, name="Moonraker", added=date_added) + + data = { + "format": "4", + "bookresource_id": True, + "bookresource_added": True, + } + response = self.client.post(self.book_export_url, data) + self.assertEqual(response.status_code, 200) + + @patch("import_export.mixins.BaseExportMixin.choose_export_resource_class") + def test_datetime_export_empty_field(self, mock_choose_export_resource_class): + mock_choose_export_resource_class.return_value = self.BookResource_ + date_added = None + Book.objects.create(id=101, name="Moonraker", added=date_added) + + data = { + "format": "2", + "bookresource_id": True, + "bookresource_added": True, + } + response = self.client.post(self.book_export_url, data) + self.assertEqual(response.status_code, 200) + content = response.content + wb = load_workbook(filename=BytesIO(content)) + self.assertIsNone(wb.active["B2"].value) + + +class ExportInvalidCharTest(AdminTestMixin, TestCase): + # issue 2000 + + def test_export_xlsx(self): + Book.objects.create(id=101, name="invalid" + chr(11)) + + data = { + "format": "2", + "bookresource_id": True, + "bookresource_name": True, + } + response = self.client.post(self.book_export_url, data) + self.assertIn( + "Export failed due to IllegalCharacterError", response.content.decode() + ) + + 
@override_settings(IMPORT_EXPORT_ESCAPE_ILLEGAL_CHARS_ON_EXPORT=True) + def test_export_xlsx_with_escape(self): + Book.objects.create(id=101, name="invalid" + chr(11)) + + data = { + "format": "2", + "bookresource_id": True, + "bookresource_name": True, + } + response = self.client.post(self.book_export_url, data) + self.assertEqual(response.status_code, 200) + content = response.content + wb = load_workbook(filename=BytesIO(content)) + self.assertEqual("invalid�", wb.active["B2"].value) diff --git a/tests/core/tests/admin_integration/test_import_encoding.py b/tests/core/tests/admin_integration/test_import_encoding.py new file mode 100644 index 000000000..ffb2d8b8a --- /dev/null +++ b/tests/core/tests/admin_integration/test_import_encoding.py @@ -0,0 +1,182 @@ +from core.tests.admin_integration.mixins import AdminTestMixin +from django.test.testcases import TestCase +from django.test.utils import override_settings + + +class ConfirmImportEncodingTest(AdminTestMixin, TestCase): + """Test handling 'confirm import' step using different file encodings + and storage types. 
+ """ + + def _is_str_in_response(self, filename, input_format, encoding=None): + super()._assert_string_in_response( + self.book_import_url, + filename, + input_format, + encoding=encoding, + str_in_response="test@example.com", + ) + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.TempFolderStorage" + ) + def test_import_action_handles_TempFolderStorage_read(self): + self._is_str_in_response("books.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.TempFolderStorage" + ) + def test_import_action_handles_TempFolderStorage_read_mac(self): + self._is_str_in_response("books-mac.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.TempFolderStorage" + ) + def test_import_action_handles_TempFolderStorage_read_iso_8859_1(self): + self._is_str_in_response("books-ISO-8859-1.csv", "0", "ISO-8859-1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.TempFolderStorage" + ) + def test_import_action_handles_TempFolderStorage_read_binary(self): + self._is_str_in_response("books.xls", "1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.CacheStorage" + ) + def test_import_action_handles_CacheStorage_read(self): + self._is_str_in_response("books.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.CacheStorage" + ) + def test_import_action_handles_CacheStorage_read_mac(self): + self._is_str_in_response("books-mac.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.CacheStorage" + ) + def test_import_action_handles_CacheStorage_read_iso_8859_1(self): + self._is_str_in_response("books-ISO-8859-1.csv", "0", "ISO-8859-1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.CacheStorage" + ) + def test_import_action_handles_CacheStorage_read_binary(self): + 
self._is_str_in_response("books.xls", "1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.MediaStorage" + ) + def test_import_action_handles_MediaStorage_read(self): + self._is_str_in_response("books.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.MediaStorage" + ) + def test_import_action_handles_MediaStorage_read_mac(self): + self._is_str_in_response("books-mac.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.MediaStorage" + ) + def test_import_action_handles_MediaStorage_read_iso_8859_1(self): + self._is_str_in_response("books-ISO-8859-1.csv", "0", "ISO-8859-1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.MediaStorage" + ) + def test_import_action_handles_MediaStorage_read_binary(self): + self._is_str_in_response("books.xls", "1") + + +class CompleteImportEncodingTest(AdminTestMixin, TestCase): + """Test handling 'complete import' step using different file encodings + and storage types. 
+ """ + + def _is_str_in_response(self, filename, input_format, encoding=None): + response = self._do_import_post( + self.book_import_url, filename, input_format, encoding=encoding + ) + confirm_form = response.context["confirm_form"] + data = confirm_form.initial + response = self._post_url_response( + self.book_process_import_url, data, follow=True + ) + self.assertContains( + response, + "Import finished: 1 new, 0 updated, 0 deleted and 0 skipped books.", + ) + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.TempFolderStorage" + ) + def test_import_action_handles_TempFolderStorage_read(self): + self._is_str_in_response("books.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.TempFolderStorage" + ) + def test_import_action_handles_TempFolderStorage_read_mac(self): + self._is_str_in_response("books-mac.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.TempFolderStorage" + ) + def test_import_action_handles_TempFolderStorage_read_iso_8859_1(self): + self._is_str_in_response("books-ISO-8859-1.csv", "0", "ISO-8859-1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.TempFolderStorage" + ) + def test_import_action_handles_TempFolderStorage_read_binary(self): + self._is_str_in_response("books.xls", "1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.CacheStorage" + ) + def test_import_action_handles_CacheStorage_read(self): + self._is_str_in_response("books.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.CacheStorage" + ) + def test_import_action_handles_CacheStorage_read_mac(self): + self._is_str_in_response("books-mac.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.CacheStorage" + ) + def test_import_action_handles_CacheStorage_read_iso_8859_1(self): + 
self._is_str_in_response("books-ISO-8859-1.csv", "0", "ISO-8859-1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.CacheStorage" + ) + def test_import_action_handles_CacheStorage_read_binary(self): + self._is_str_in_response("books.xls", "1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.MediaStorage" + ) + def test_import_action_handles_MediaStorage_read(self): + self._is_str_in_response("books.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.MediaStorage" + ) + def test_import_action_handles_MediaStorage_read_mac(self): + self._is_str_in_response("books-mac.csv", "0") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.MediaStorage" + ) + def test_import_action_handles_MediaStorage_read_iso_8859_1(self): + self._is_str_in_response("books-ISO-8859-1.csv", "0", "ISO-8859-1") + + @override_settings( + IMPORT_EXPORT_TMP_STORAGE_CLASS="import_export.tmp_storages.MediaStorage" + ) + def test_import_action_handles_MediaStorage_read_binary(self): + self._is_str_in_response("books.xls", "1") diff --git a/tests/core/tests/admin_integration/test_import_errors.py b/tests/core/tests/admin_integration/test_import_errors.py new file mode 100644 index 000000000..79b48de42 --- /dev/null +++ b/tests/core/tests/admin_integration/test_import_errors.py @@ -0,0 +1,249 @@ +import os +from io import StringIO +from unittest import mock + +from core.admin import BookAdmin, CustomBookAdmin +from core.models import Author, Book, EBook +from core.tests.admin_integration.mixins import AdminTestMixin +from django.contrib.admin.sites import AdminSite +from django.contrib.auth.models import User +from django.test import RequestFactory +from django.test.testcases import TestCase +from django.test.utils import override_settings +from django.utils.translation import gettext_lazy as _ + +from import_export.exceptions import FieldError + + 
+class ImportErrorHandlingTests(AdminTestMixin, TestCase): + + def test_import_action_handles_UnicodeDecodeError_as_form_error(self): + with mock.patch( + "import_export.admin.TempFolderStorage.read" + ) as mock_tmp_folder_storage: + b_arr = b"\x00" + mock_tmp_folder_storage.side_effect = UnicodeDecodeError( + "codec", b_arr, 1, 2, "fail!" + ) + response = self._do_import_post(self.book_import_url, "books.csv") + self.assertEqual(response.status_code, 200) + target_msg = ( + "'UnicodeDecodeError' encountered while trying to read file. " + "Ensure you have chosen the correct format for the file." + ) + self.assertFormError(response.context["form"], "import_file", target_msg) + + def test_import_action_handles_ValueError_as_form_error(self): + with mock.patch( + "import_export.admin.TempFolderStorage.read" + ) as mock_tmp_folder_storage: + mock_tmp_folder_storage.side_effect = ValueError("some unknown error") + response = self._do_import_post(self.book_import_url, "books.csv") + self.assertEqual(response.status_code, 200) + target_msg = ( + "'ValueError' encountered while trying to read file. " + "Ensure you have chosen the correct format for the file." 
+ ) + self.assertFormError(response.context["form"], "import_file", target_msg) + + def test_import_action_handles_FieldError(self): + # issue 1722 + with mock.patch( + "import_export.resources.Resource._check_import_id_fields" + ) as mock_check_import_id_fields: + mock_check_import_id_fields.side_effect = FieldError("some unknown error") + response = self._do_import_post(self.book_import_url, "books.csv") + self.assertEqual(response.status_code, 200) + target_msg = "some unknown error" + self.assertIn(target_msg, response.content.decode()) + + @override_settings(LANGUAGE_CODE="es") + def test_import_action_handles_ValueError_as_form_error_with_translation(self): + with mock.patch( + "import_export.admin.TempFolderStorage.read" + ) as mock_tmp_folder_storage: + mock_tmp_folder_storage.side_effect = ValueError("some unknown error") + response = self._do_import_post(self.book_import_url, "books.csv") + self.assertEqual(response.status_code, 200) + target_msg = ( + "Se encontró 'ValueError' mientras se intentaba leer el archivo. " + "Asegúrese que seleccionó el formato correcto para el archivo." + ) + self.assertFormError(response.context["form"], "import_file", target_msg) + + def test_import_with_customized_form_handles_form_validation(self): + """Test if admin import handles errors gracefully when confirm_form is + invalid for eg. if a required field (in this case 'Author') is left blank. + """ + # We use customized BookAdmin (CustomBookAdmin) with modified import + # form, which requires Author to be selected (from available authors). + # Note that url is /admin/core/ebook/import (and not: ...book/import)! 
+ + # We need a author in the db to select from in the admin import custom + # forms, first we will submit data with invalid author_id and if the + # error is handled correctly, resubmit form with correct author_id and + # check if data is imported successfully + Author.objects.create(id=11, name="Test Author") + + # GET the import form + response = self._get_url_response( + self.ebook_import_url, str_in_response='form action=""' + ) + self.assertTemplateUsed(response, self.admin_import_template_url) + # POST the import form + input_format = "0" + filename = os.path.join( + os.path.dirname(__file__), + os.path.pardir, + os.path.pardir, + "exports", + "books.csv", + ) + with open(filename, "rb") as fobj: + data = {"author": 11, "format": input_format, "import_file": fobj} + response = self._post_url_response(self.ebook_import_url, data) + + self.assertIn("result", response.context) + self.assertFalse(response.context["result"].has_errors()) + self.assertIn("confirm_form", response.context) + confirm_form = response.context["confirm_form"] + self.assertIsInstance( + confirm_form, + CustomBookAdmin(EBook, "ebook/import").get_confirm_form_class(None), + ) + + data = confirm_form.initial + self.assertEqual(data["original_file_name"], "books.csv") + + # manipulate data to make the payload invalid + data["author"] = "" + response = self._post_url_response( + self.ebook_process_import_url, data, follow=True + ) + + # check if error is captured gracefully + self.assertEqual( + response.context["errors"], {"author": ["This field is required."]} + ) + + # resubmit with valid data + data["author"] = 11 + response = self._post_url_response( + self.ebook_process_import_url, data, follow=True + ) + self.assertEqual(response.status_code, 200) + self.assertContains( + response, + _( + "Import finished: {} new, {} updated, {} deleted and {} skipped {}." 
+ ).format(1, 0, 0, 0, EBook._meta.verbose_name_plural), + ) + + def test_import_action_invalid_date(self): + # test that a row with an invalid date redirects to errors page + index = self._get_input_format_index("csv") + response = self._do_import_post( + self.book_import_url, "books-invalid-date.csv", index + ) + result = response.context["result"] + # there should be a single invalid row + self.assertEqual(1, len(result.invalid_rows)) + self.assertEqual( + "Value could not be parsed using defined formats.", + result.invalid_rows[0].error.messages[0], + ) + # no rows should be imported because we rollback on validation errors + self.assertEqual(0, Book.objects.count()) + + def test_import_action_error_on_save(self): + with mock.patch("core.models.Book.save") as mock_save: + mock_save.side_effect = ValueError("some unknown error") + response = self._do_import_post(self.book_import_url, "books.csv") + self.assertIn("some unknown error", response.content.decode()) + + def test_import_action_invalidates_data_sheet_with_no_headers_or_data(self): + # GET the import form + response = self._get_url_response( + self.book_import_url, str_in_response='form action=""' + ) + self.assertTemplateUsed(response, self.admin_import_template_url) + + response = self._do_import_post( + self.book_import_url, "books-no-headers.csv", input_format=0 + ) + self.assertEqual(response.status_code, 200) + target_msg = ( + "No valid data to import. Ensure your file " + "has the correct headers or data for import." 
+ ) + self.assertFormError(response.context["form"], "import_file", target_msg) + + +class TestImportErrorMessageFormat(AdminTestMixin, TestCase): + # issue 1724 + + def setUp(self): + super().setUp() + self.csvdata = "id,name,author\r\n" "1,Ulysses,666\r\n" + self.filedata = StringIO(self.csvdata) + self.data = {"format": "0", "import_file": self.filedata} + self.model_admin = BookAdmin(Book, AdminSite()) + + factory = RequestFactory() + self.request = factory.post(self.book_import_url, self.data, follow=True) + self.request.user = User.objects.create_user("admin1") + + def test_result_error_display_default(self): + response = self.model_admin.import_action(self.request) + response.render() + content = response.content.decode() + self.assertIn("import-error-display-message", content) + self.assertIn( + "Line number: 1 - Author matching query does not exist.", + content, + ) + self.assertNotIn("import-error-display-row", content) + self.assertNotIn("import-error-display-traceback", content) + + def test_result_error_display_message_only(self): + self.model_admin.import_error_display = ("message",) + + response = self.model_admin.import_action(self.request) + response.render() + content = response.content.decode() + self.assertIn( + "Line number: 1 - Author matching query does not exist.", + content, + ) + self.assertIn("import-error-display-message", content) + self.assertNotIn("import-error-display-row", content) + self.assertNotIn("import-error-display-traceback", content) + + def test_result_error_display_row_only(self): + self.model_admin.import_error_display = ("row",) + + response = self.model_admin.import_action(self.request) + response.render() + content = response.content.decode() + self.assertNotIn( + "Line number: 1 - Author matching query does not exist.", + content, + ) + self.assertNotIn("import-error-display-message", content) + self.assertIn("import-error-display-row", content) + self.assertNotIn("import-error-display-traceback", content) + + def 
test_result_error_display_traceback_only(self): + self.model_admin.import_error_display = ("traceback",) + + response = self.model_admin.import_action(self.request) + response.render() + content = response.content.decode() + self.assertNotIn( + "Line number: 1 - Author matching query does not exist.", + content, + ) + self.assertNotIn("import-error-display-message", content) + self.assertNotIn("import-error-display-row", content) + self.assertIn("import-error-display-traceback", content) + self.assertIn("Traceback (most recent call last)", content) diff --git a/tests/core/tests/admin_integration/test_import_functionality.py b/tests/core/tests/admin_integration/test_import_functionality.py new file mode 100644 index 000000000..48ef9383b --- /dev/null +++ b/tests/core/tests/admin_integration/test_import_functionality.py @@ -0,0 +1,576 @@ +from unittest import mock +from unittest.mock import PropertyMock, patch + +from core.admin import BookAdmin, EBookResource, ImportMixin +from core.models import Author, Book, Parent +from core.tests.admin_integration.mixins import AdminTestMixin +from django.contrib.admin.models import DELETION, LogEntry +from django.core.exceptions import ValidationError +from django.test.testcases import TestCase, TransactionTestCase +from django.test.utils import override_settings +from django.utils.translation import gettext_lazy as _ + +from import_export.admin import ExportMixin +from import_export.formats import base_formats +from import_export.resources import ModelResource + + +class ImportAdminIntegrationTest(AdminTestMixin, TestCase): + + @patch( + "core.admin.BookAdmin.skip_import_confirm", + new_callable=PropertyMock, + return_value=True, + ) + def test_import_skips_confirm_page(self, mock_skip_import_confirm): + response = self._do_import_post(self.book_import_url, "books.csv", follow=True) + self.assertEqual(response.status_code, 200) + self.assertContains( + response, + _( + "Import finished: {} new, {} updated, {} deleted and {} 
skipped {}." + ).format(1, 0, 0, 0, Book._meta.verbose_name_plural), + ) + + def test_delete_from_admin(self): + # test delete from admin site (see #432) + + # create a book which can be deleted + b = Book.objects.create(id=1) + + response = self._do_import_post(self.book_import_url, "books-for-delete.csv") + self.assertEqual(response.status_code, 200) + confirm_form = response.context["confirm_form"] + data = confirm_form.initial + self._post_url_response(self.book_process_import_url, data, follow=True) + + # check the LogEntry was created as expected + deleted_entry = LogEntry.objects.latest("id") + self.assertEqual("delete through import_export", deleted_entry.change_message) + self.assertEqual(DELETION, deleted_entry.action_flag) + self.assertEqual(b.id, int(deleted_entry.object_id)) + self.assertEqual("", deleted_entry.object_repr) + + @override_settings(TEMPLATE_STRING_IF_INVALID="INVALID_VARIABLE") + @patch("import_export.admin.ImportMixin.choose_import_resource_class") + def test_import_passes_correct_kwargs_to_constructor( + self, mock_choose_import_resource_class + ): + # issue 1741 + class TestResource(ModelResource): + def __init__(self, **kwargs): + super().__init__(**kwargs) + + # the form is passed as a kwarg to the Resource constructor + # if not present, then it means that the original kwargs were lost + if "form" not in kwargs: + raise Exception("No form") + + class Meta: + model = Book + fields = ("id",) + + # mock the returned resource class so that we can inspect constructor params + mock_choose_import_resource_class.return_value = TestResource + + response = self._do_import_post(self.book_import_url, "books.csv") + self.assertEqual(response.status_code, 200) + + def test_get_tmp_storage_class_attribute(self): + """Mock dynamically loading a class defined by an attribute""" + target = "SomeClass" + m = ImportMixin() + m.tmp_storage_class = "tmpClass" + with mock.patch("import_export.admin.import_string") as mock_import_string: + 
mock_import_string.return_value = target + self.assertEqual(target, m.get_tmp_storage_class()) + + def test_get_import_data_kwargs_with_form_kwarg(self): + """ + Test that if the method is called with a 'form' kwarg, + then it is removed and the updated dict is returned + """ + m = ImportMixin() + kw = {"a": 1, "form": "some_form"} + target = {"a": 1} + self.assertEqual(target, m.get_import_data_kwargs(**kw)) + + def test_get_import_data_kwargs_with_no_form_kwarg_returns_kwarg_dict(self): + """ + Test that if the method is called with no 'form' kwarg, + then an empty dict is returned + """ + m = ImportMixin() + kw = { + "a": 1, + } + target = {"a": 1} + self.assertEqual(target, m.get_import_data_kwargs(**kw)) + + def test_get_context_data_returns_empty_dict(self): + m = ExportMixin() + self.assertEqual({}, m.get_context_data()) + + @override_settings(IMPORT_FORMATS=[base_formats.XLSX, base_formats.XLS]) + def test_import_admin_uses_import_format_settings(self): + """ + Test that import form only avails the formats provided by the + IMPORT_FORMATS setting + """ + request = self._get_url_response(self.book_import_url).wsgi_request + mock_site = mock.MagicMock() + import_form = BookAdmin(Book, mock_site).create_import_form(request) + + file_format = import_form.fields["format"] + choices = file_format.choices + + self.assertEqual(len(choices), 3) + self.assertEqual(choices[0][1], "---") + self.assertEqual(choices[1][1], "xlsx") + self.assertEqual(choices[2][1], "xls") + + @override_settings(IMPORT_FORMATS=[]) + def test_export_empty_import_formats(self): + with self.assertRaisesRegex(ValueError, "invalid formats list"): + self._get_url_response(self.book_import_url) + + +class ImportFileHandlingTests(AdminTestMixin, TestCase): + + @override_settings(TEMPLATE_STRING_IF_INVALID="INVALID_VARIABLE") + def test_import(self): + # GET the import form + response = self._get_url_response( + self.book_import_url, str_in_response='form action=""' + ) + 
self.assertTemplateUsed(response, self.admin_import_template_url) + + response = self._do_import_post(self.book_import_url, "books.csv") + self.assertIn("result", response.context) + self.assertFalse(response.context["result"].has_errors()) + self.assertIn("confirm_form", response.context) + confirm_form = response.context["confirm_form"] + + data = confirm_form.initial + self.assertEqual(data["original_file_name"], "books.csv") + response = self._post_url_response( + self.book_process_import_url, data, follow=True + ) + self.assertContains( + response, + _( + "Import finished: {} new, {} updated, {} deleted and {} skipped {}." + ).format(1, 0, 0, 0, Book._meta.verbose_name_plural), + ) + + def test_import_mac(self): + # GET the import form + response = self._get_url_response( + self.book_import_url, str_in_response='form action=""' + ) + self.assertTemplateUsed(response, self.admin_import_template_url) + + response = self._do_import_post(self.book_import_url, "books-mac.csv") + self.assertIn("result", response.context) + self.assertFalse(response.context["result"].has_errors()) + self.assertIn("confirm_form", response.context) + confirm_form = response.context["confirm_form"] + + data = confirm_form.initial + self.assertEqual(data["original_file_name"], "books-mac.csv") + response = self._post_url_response( + self.book_process_import_url, data, follow=True + ) + self.assertContains( + response, + _( + "Import finished: {} new, {} updated, {} deleted and {} skipped {}." 
+ ).format(1, 0, 0, 0, Book._meta.verbose_name_plural), + ) + + @override_settings(TEMPLATE_STRING_IF_INVALID="INVALID_VARIABLE") + def test_import_second_resource(self): + Book.objects.create(id=1) + + # GET the import form + response = self._get_url_response( + self.book_import_url, str_in_response="Export/Import only book names" + ) + self.assertTemplateUsed(response, self.admin_import_template_url) + self.assertContains(response, 'form action=""') + + response = self._do_import_post(self.book_import_url, "books.csv", resource=1) + self.assertIn("result", response.context) + self.assertFalse(response.context["result"].has_errors()) + self.assertIn("confirm_form", response.context) + confirm_form = response.context["confirm_form"] + + data = confirm_form.initial + self.assertEqual(data["original_file_name"], "books.csv") + response = self._post_url_response( + self.book_process_import_url, data, follow=True + ) + self.assertContains( + response, + _( + "Import finished: {} new, {} updated, {} deleted and {} skipped {}." 
+ ).format(0, 1, 0, 0, Book._meta.verbose_name_plural), + ) + # Check, that we really use second resource - author_email didn't get imported + self.assertEqual(Book.objects.get(id=1).author_email, "") + + +class ImportLogEntryTest(AdminTestMixin, TestCase): + def test_import_log_entry(self): + response = self._do_import_post(self.book_import_url, "books.csv") + + self.assertEqual(response.status_code, 200) + confirm_form = response.context["confirm_form"] + data = confirm_form.initial + self._post_url_response(self.book_process_import_url, data, follow=True) + book = LogEntry.objects.latest("id") + self.assertEqual(book.object_repr, "Some book") + self.assertEqual(book.object_id, str(1)) + + def test_import_log_entry_with_fk(self): + Parent.objects.create(id=1234, name="Some Parent") + response = self._do_import_post(self.child_import_url, "child.csv") + self.assertEqual(response.status_code, 200) + confirm_form = response.context["confirm_form"] + data = confirm_form.initial + self._post_url_response(self.child_process_import_url, data, follow=True) + child = LogEntry.objects.latest("id") + self.assertEqual(child.object_repr, "Some - child of Some Parent") + self.assertEqual(child.object_id, str(1)) + + @patch("import_export.resources.Resource.skip_row") + def test_import_log_entry_skip_row(self, mock_skip_row): + # test issue 1937 - ensure that skipped rows do not create log entries + mock_skip_row.return_value = True + response = self._do_import_post(self.book_import_url, "books.csv") + + self.assertEqual(response.status_code, 200) + confirm_form = response.context["confirm_form"] + data = confirm_form.initial + self._post_url_response(self.book_process_import_url, data, follow=True) + self.assertEqual(0, LogEntry.objects.count()) + + def test_import_log_entry_error_row(self): + # ensure that error rows do not create log entries + response = self._do_import_post(self.book_import_url, "books.csv") + + self.assertEqual(response.status_code, 200) + confirm_form = 
response.context["confirm_form"] + data = confirm_form.initial + with mock.patch("core.admin.BookResource.skip_row") as mock_skip: + mock_skip.side_effect = ValueError("some unknown error") + self._post_url_response(self.book_process_import_url, data, follow=True) + self.assertEqual(0, LogEntry.objects.count()) + + def test_import_log_entry_validation_error_row(self): + # ensure that validation error rows do not create log entries + response = self._do_import_post(self.book_import_url, "books.csv") + + self.assertEqual(response.status_code, 200) + confirm_form = response.context["confirm_form"] + data = confirm_form.initial + with mock.patch("core.admin.BookResource.skip_row") as mock_skip: + mock_skip.side_effect = ValidationError("some unknown error") + self._post_url_response(self.book_process_import_url, data, follow=True) + self.assertEqual(0, LogEntry.objects.count()) + + @override_settings(IMPORT_EXPORT_SKIP_ADMIN_LOG=True) + def test_import_log_entry_skip_admin_log(self): + response = self._do_import_post(self.book_import_url, "books.csv") + + self.assertEqual(response.status_code, 200) + confirm_form = response.context["confirm_form"] + data = confirm_form.initial + self._post_url_response(self.book_process_import_url, data, follow=True) + self.assertEqual(0, LogEntry.objects.count()) + + def test_import_log_entry_skip_admin_log_attr(self): + response = self._do_import_post(self.book_import_url, "books.csv") + + self.assertEqual(response.status_code, 200) + confirm_form = response.context["confirm_form"] + data = confirm_form.initial + with mock.patch( + "import_export.admin.ImportMixin.skip_admin_log", + new_callable=PropertyMock, + return_value=True, + ): + self._post_url_response(self.book_process_import_url, data, follow=True) + self.assertEqual(0, LogEntry.objects.count()) + + +@override_settings(IMPORT_EXPORT_SKIP_ADMIN_CONFIRM=True) +class TestImportSkipConfirm(AdminTestMixin, TransactionTestCase): + fixtures = ["author"] + + def 
_is_str_in_response( + self, + filename, + input_format, + encoding=None, + str_in_response=None, + follow=False, + status_code=200, + ): + response = self._do_import_post( + self.book_import_url, + filename, + input_format, + encoding=encoding, + follow=follow, + ) + self.assertEqual(response.status_code, status_code) + if str_in_response is not None: + self.assertContains(response, str_in_response) + + def _is_regex_in_response( + self, + filename, + input_format, + encoding=None, + regex_in_response=None, + follow=False, + status_code=200, + ): + response = self._do_import_post( + self.book_import_url, + filename, + input_format, + encoding=encoding, + follow=follow, + ) + self.assertEqual(response.status_code, status_code) + if regex_in_response is not None: + self.assertRegex(response.content.decode(), regex_in_response) + + def test_import_action_create(self): + self._is_str_in_response( + "books.csv", + "0", + follow=True, + str_in_response="Import finished: 1 new, 0 updated, " + + "0 deleted and 0 skipped books.", + ) + self.assertEqual(1, Book.objects.count()) + + def test_import_action_error_on_save(self): + with mock.patch("core.models.Book.save") as mock_save: + mock_save.side_effect = ValueError("some unknown error") + response = self._do_import_post(self.book_import_url, "books.csv") + self.assertIn("some unknown error", response.content.decode()) + + @override_settings(IMPORT_EXPORT_USE_TRANSACTIONS=True) + def test_import_transaction_enabled_validation_error(self): + # with transactions enabled, a validation error should cause the entire + # import to be rolled back + self._do_import_post(self.book_import_url, "books-invalid-date.csv") + self.assertEqual(0, Book.objects.count()) + + @override_settings(IMPORT_EXPORT_USE_TRANSACTIONS=False) + def test_import_transaction_disabled_validation_error(self): + # with transactions disabled, a validation error should not cause the entire + # import to fail + self._do_import_post(self.book_import_url, 
"books-invalid-date.csv") + self.assertEqual(1, Book.objects.count()) + + @override_settings(IMPORT_EXPORT_USE_TRANSACTIONS=True) + def test_import_transaction_enabled_core_error(self): + # test that if we send a file with multiple rows, + # and transactions is enabled, a core error means that + # no instances are persisted + index = self._get_input_format_index("json") + with mock.patch("core.admin.BookResource.skip_row") as mock_skip: + mock_skip.side_effect = [None, ValueError("some unknown error"), None] + response = self._do_import_post(self.book_import_url, "books.json", index) + self.assertIn("some unknown error", response.content.decode()) + self.assertEqual(0, Book.objects.count()) + + @override_settings(IMPORT_EXPORT_USE_TRANSACTIONS=False) + def test_import_transaction_disabled_core_error(self): + # with transactions disabled, a core (db constraint) error should not cause the + # entire import to fail + index = self._get_input_format_index("json") + with mock.patch("core.admin.BookResource.skip_row") as mock_skip: + mock_skip.side_effect = [None, ValueError("some unknown error"), None] + response = self._do_import_post(self.book_import_url, "books.json", index) + self.assertIn("some unknown error", response.content.decode()) + self.assertEqual(2, Book.objects.count()) + + def test_import_action_mac(self): + self._is_str_in_response( + "books-mac.csv", + "0", + follow=True, + str_in_response="Import finished: 1 new, 0 updated, " + + "0 deleted and 0 skipped books.", + ) + + def test_import_action_iso_8859_1(self): + self._is_str_in_response( + "books-ISO-8859-1.csv", + "0", + "ISO-8859-1", + follow=True, + str_in_response="Import finished: 1 new, 0 updated, " + + "0 deleted and 0 skipped books.", + ) + + def test_import_action_decode_error(self): + # attempting to read a file with the incorrect encoding should raise an error + self._is_regex_in_response( + "books-ISO-8859-1.csv", + "0", + follow=True, + encoding="utf-8-sig", + regex_in_response=( + 
".*UnicodeDecodeError.* encountered " "while trying to read file" + ), + ) + + def test_import_action_binary(self): + self._is_str_in_response( + "books.xls", + "1", + follow=True, + str_in_response="Import finished: 1 new, 0 updated, " + + "0 deleted and 0 skipped books.", + ) + + +class ConfirmImportPreviewOrderTest(AdminTestMixin, TestCase): + """Test preview order displayed correctly (issue 1784).""" + + fixtures = ["author"] + + def test_import_preview_order(self): + author_id = Author.objects.first().id + response = self._do_import_post( + self.ebook_import_url, + "ebooks.csv", + input_format="0", + data={"author": author_id}, + ) + # test header rendered in correct order + target_header_re = ( + r"[\\n\s]+" + r"[\\n\s]+" + r"[\\n\s]+" + r"id[\\n\s]+" + r"Email of the author[\\n\s]+" + r"name[\\n\s]+" + r"published_date[\\n\s]+" + r"Author Name[\\n\s]+" + r"[\\n\s]+" + "" + ) + self.assertRegex(response.content.decode(), target_header_re) + # test row rendered in correct order + target_row_re = ( + r'[\\n\s]+' + r'[\\n\s]+New[\\n\s]+[\\n\s]+' + r'1[\\n\s]+' + r'test@example.com[\\n\s]+' + r'Some book[\\n\s]+' + r"[\\n\s]+" + r"[\\n\s]+" + "" + ) + self.assertRegex(response.content.decode(), target_row_re) + + +class CustomColumnNameImportTest(AdminTestMixin, TestCase): + """Handle custom column name import (issue 1822).""" + + fixtures = ["author"] + + def setUp(self): + super().setUp() + EBookResource._meta.fields = ("id", "author_email", "name", "published_date") + + def tearDown(self): + super().tearDown() + EBookResource._meta.fields = ("id", "author_email", "name", "published") + + def test_import_preview_order(self): + author_id = Author.objects.first().id + response = self._do_import_post( + self.ebook_import_url, + "ebooks.csv", + input_format="0", + data={"author": author_id}, + ) + # test header rendered in correct order + target_header_re = ( + r"[\\n\s]+" + r"[\\n\s]+" + r"[\\n\s]+" + r"id[\\n\s]+" + r"Email of the author[\\n\s]+" + 
r"name[\\n\s]+" + r"published_date[\\n\s]+" + r"Author Name[\\n\s]+" + r"[\\n\s]+" + "" + ) + self.assertRegex(response.content.decode(), target_header_re) + # test row rendered in correct order + target_row_re = ( + r'[\\n\s]+' + r'[\\n\s]+New[\\n\s]+[\\n\s]+' + r'1[\\n\s]+' + r'test@example.com[\\n\s]+' + r'Some book[\\n\s]+' + r"[\\n\s]+" + r"[\\n\s]+" + "" + ) + self.assertRegex(response.content.decode(), target_row_re) + + +class DefaultFieldsImportOrderTest(AdminTestMixin, TestCase): + """ + Display correct import order based on default 'fields' declaration (issue 1845). + Ensure that the prompt text on the import page renders the + fields in the correct order. + """ + + def test_import_preview_order(self): + response = self._get_url_response(self.ebook_import_url) + # test display rendered in correct order + target_re = ( + r"This importer will import the following fields:[\\n\s]+" + r"id, Email of the author, name, published_date, Author Name" + r"[\\n\s]+" + ) + self.assertRegex(response.content.decode(), target_re) + + +class DeclaredImportOrderTest(AdminTestMixin, TestCase): + """ + Display correct import order when 'import_order' is declared (issue 1845). + Ensure that the prompt text on the import page renders the + fields in the correct order. 
+ """ + + def setUp(self): + super().setUp() + EBookResource._meta.import_order = ("id", "name", "published", "author_email") + + def tearDown(self): + super().tearDown() + EBookResource._meta.import_order = () + + def test_import_preview_order(self): + response = self._get_url_response(self.ebook_import_url) + # test display rendered in correct order + target_re = ( + r"This importer will import the following fields:[\\n\s]+" + r"id, name, published_date, Email of the author, Author Name" + r"[\\n\s]+" + ) + self.assertRegex(response.content.decode(), target_re) diff --git a/tests/core/tests/admin_integration/test_import_security.py b/tests/core/tests/admin_integration/test_import_security.py new file mode 100644 index 000000000..70e89b866 --- /dev/null +++ b/tests/core/tests/admin_integration/test_import_security.py @@ -0,0 +1,47 @@ +import os + +from core.admin import AuthorAdmin, BookAdmin +from core.tests.admin_integration.mixins import AdminTestMixin +from django.test.testcases import TestCase +from django.utils.translation import gettext_lazy as _ + + +class ImportAdminSecurityTests(AdminTestMixin, TestCase): + + def test_csrf(self): + self._get_url_response(self.book_process_import_url, expected_status_code=405) + + def test_import_file_name_in_tempdir(self): + # 65 - import_file_name form field can be use to access the filesystem + import_file_name = os.path.join( + os.path.dirname(__file__), os.path.pardir, "exports", "books.csv" + ) + data = { + "format": "0", + "import_file_name": import_file_name, + "original_file_name": "books.csv", + } + with self.assertRaises(FileNotFoundError): + self._post_url_response(self.book_process_import_url, data) + + def test_import_buttons_visible_without_add_permission(self): + # When using ImportMixin, users should be able to see the import button + # without add permission (to be consistent with ImportExportMixin) + + original = AuthorAdmin.has_add_permission + AuthorAdmin.has_add_permission = lambda self, request: 
False + response = self._get_url_response(self.core_author_url) + AuthorAdmin.has_add_permission = original + + self.assertContains(response, _("Import")) + self.assertTemplateUsed(response, self.change_list_url) + + def test_import_export_buttons_visible_without_add_permission(self): + # issue 38 - Export button not visible when no add permission + original = BookAdmin.has_add_permission + BookAdmin.has_add_permission = lambda self, request: False + response = self._get_url_response(self.book_import_url) + BookAdmin.has_add_permission = original + + self.assertContains(response, _("Export")) + self.assertContains(response, _("Import")) diff --git a/tests/core/tests/admin_integration/test_import_templates.py b/tests/core/tests/admin_integration/test_import_templates.py new file mode 100644 index 000000000..3dcacecc0 --- /dev/null +++ b/tests/core/tests/admin_integration/test_import_templates.py @@ -0,0 +1,134 @@ +import os +from unittest.mock import patch + +from core.admin import CustomBookAdmin, ImportMixin +from core.models import Author, EBook +from core.tests.admin_integration.mixins import AdminTestMixin +from django.test.testcases import TestCase +from django.test.utils import override_settings +from django.utils.translation import gettext_lazy as _ + + +class ImportTemplateTests(AdminTestMixin, TestCase): + + def test_import_export_template(self): + response = self._get_url_response(self.core_book_url) + self.assertTemplateUsed(response, self.change_list_template_url) + self.assertTemplateUsed(response, self.change_list_url) + self.assertTemplateUsed(response, self.change_list_url) + self.assertContains(response, _("Import")) + self.assertContains(response, _("Export")) + self.assertContains(response, "Custom change list item") + + @patch("import_export.admin.logger") + def test_issue_1521_change_list_template_as_property(self, mock_logger): + # Test that a warning is logged when change_list_template is a property + class TestImportCls(ImportMixin): + 
@property + def change_list_template(self): + return ["x"] + + TestImportCls() + mock_logger.warning.assert_called_once_with( + "failed to assign change_list_template attribute" + ) + + @override_settings(DEBUG=True) + def test_correct_scripts_declared_when_debug_is_true(self): + # GET the import form + response = self._get_url_response( + self.book_import_url, str_in_response="form action=" + ) + self.assertTemplateUsed(response, self.admin_import_template_url) + self.assertContains( + response, + '', + ) + ) + + def test_export_html_escape(self): + res = self.format.export_data(self.dataset) + self.assertIn( + ( + "1" + "good_user" + "John Doe" + "2" + "evil_user" + '<script>alert("I want to steal your credit card data")' + "</script>" + ), + res, + ) + + +class YAMLFormatTest(TestCase): + def test_numeric_widget_export(self): + dataset = tablib.Dataset(headers=["id", "username"]) + dataset.append((NumberWidget().render(1), "x")) + res = base_formats.YAML().export_data(dataset) + self.assertEqual("- {id: '1', username: x}\n", res) diff --git a/tests/core/tests/test_command_export.py b/tests/core/tests/test_command_export.py new file mode 100644 index 000000000..d1356cd5c --- /dev/null +++ b/tests/core/tests/test_command_export.py @@ -0,0 +1,46 @@ +from io import BytesIO, StringIO, TextIOWrapper +from unittest.mock import Mock + +from core.models import Book +from django.core.management import call_command +from django.test import TestCase + + +class ExportCommandTest(TestCase): + def setUp(self): + self.out = TextIOWrapper(BytesIO()) + + def test_export_command_as_csv(self): + Book.objects.create(id=100, name="Some book") + + call_command("export", "CSV", "core.Book", stdout=self.out) + + self.out.seek(0) + data = self.out.read() + self.assertEqual( + data, + "id,name,author,author_email,imported,published,published_time,price,added,categories\n100,Some book,,,0,,,,,\n", # noqa + ) + + def test_export_command_as_csv_with_encoding(self): + 
Book.objects.create(id=100, name="Some book") + + call_command("export", "CSV", "core.Book", stdout=self.out, encoding="cp1250") + + self.out.seek(0) + data = self.out.read() + self.assertEqual( + data, + "id,name,author,author_email,imported,published,published_time,price,added,categories\n100,Some book,,,0,,,,,\n", # noqa + ) + + def test_export_command_binary_data(self): + Book.objects.create(id=100, name="Some book") + out = TextIOWrapper(BytesIO()) + err = StringIO() + out.isatty = Mock(return_value=True) + + with self.assertRaises(SystemExit): + call_command("export", "xls", "core.Book", stdout=out, stderr=err) + + assert "This is a binary format" in err.getvalue() diff --git a/tests/core/tests/test_command_import.py b/tests/core/tests/test_command_import.py new file mode 100644 index 000000000..f1cf13bc4 --- /dev/null +++ b/tests/core/tests/test_command_import.py @@ -0,0 +1,154 @@ +import tempfile +from io import BytesIO, StringIO, TextIOWrapper +from unittest import mock +from unittest.mock import patch + +from core.models import Book +from django.core.management import call_command +from django.core.management.base import CommandError +from django.test import TestCase + +from import_export.formats.base_formats import XLSX +from import_export.resources import ModelResource, modelresource_factory + +CSV_CONTENT = """\ +id,name,author,author_email,imported,published,published_time,price,added,categories +1,Some book updat,,test@example.com,0,,,10.50,,1 +""" + +CSV_CONTENT_WITH_ERRORS = """\ +id,name,author,author_email,imported,published,published_time,price,added,categories +Some book updat,,test@example.com,0,,,10.50,,1 +""" + + +class BookResourceWithError(ModelResource): + def before_import(self, *args, **kwargs): + raise Exception("Import base errors") + + class Meta: + model = Book + + +class ImportCommandTest(TestCase): + def setUp(self): + self.out = StringIO() + self.err = StringIO() + + def test_import_command_with_csv(self): + with 
tempfile.NamedTemporaryFile(mode="w+", suffix=".csv") as tmp_csv: + tmp_csv.write(CSV_CONTENT) + tmp_csv.seek(0) + call_command( + "import", + "core.Book", + tmp_csv.name, + stdout=self.out, + stderr=self.err, + interactive=False, + ) + + self.assertEqual(Book.objects.count(), 1) + + @patch("sys.stdin", new_callable=lambda: TextIOWrapper(BytesIO())) + def test_import_command_with_stdin(self, mock_stdin): + mock_stdin.write(CSV_CONTENT) + mock_stdin.seek(0) + + call_command( + "import", + "core.Book", + "-", + stdout=self.out, + stderr=self.err, + format="CSV", + interactive=False, + ) + + self.assertEqual(Book.objects.count(), 1) + + @patch("sys.stdin", new_callable=lambda: TextIOWrapper(BytesIO())) + def test_import_command_with_stdin_binary_format(self, mock_stdin): + # create binary export data + resource = modelresource_factory(Book)() + data = resource.export() + export_data = XLSX().export_data(data) + mock_stdin.buffer.write(export_data) + mock_stdin.seek(0) + + call_command( + "import", + "core.Book", + "-", + stdout=self.out, + stderr=self.err, + format="XLSX", + interactive=False, + ) + + def test_import_command_dry_run(self): + with tempfile.NamedTemporaryFile(mode="w+", suffix=".csv") as tmp_csv: + tmp_csv.write(CSV_CONTENT) + tmp_csv.seek(0) + call_command( + "import", + "core.Book", + tmp_csv.name, + stdout=self.out, + stderr=self.err, + dry_run=True, + interactive=False, + ) + + self.assertEqual(Book.objects.count(), 0) + + def test_import_command_errors(self): + with tempfile.NamedTemporaryFile(mode="w+", suffix=".csv") as tmp_csv: + tmp_csv.write(CSV_CONTENT_WITH_ERRORS) + tmp_csv.seek(0) + with self.assertRaises(SystemExit): + call_command( + "import", + "core.Book", + tmp_csv.name, + stdout=self.out, + stderr=self.err, + interactive=False, + ) + + assert "Import errors!" 
in self.err.getvalue() + self.assertEqual(Book.objects.count(), 0) + + def test_import_command_with_base_errors(self): + with tempfile.NamedTemporaryFile(mode="w+", suffix=".csv") as tmp_csv: + tmp_csv.write(CSV_CONTENT) + tmp_csv.seek(0) + with self.assertRaises(SystemExit): + call_command( + "import", + "core.tests.test_command_import.BookResourceWithError", + tmp_csv.name, + stdout=self.out, + stderr=self.err, + interactive=False, + ) + + assert "Import base errors" in self.err.getvalue() + self.assertEqual(Book.objects.count(), 0) + + def test_import_command_interactive(self): + with mock.patch("builtins.input", side_effect=lambda msg: "no"): + with tempfile.NamedTemporaryFile(mode="w+", suffix=".csv") as tmp_csv: + tmp_csv.write(CSV_CONTENT) + tmp_csv.seek(0) + with self.assertRaises(CommandError) as e: + call_command( + "import", + "core.Book", + tmp_csv.name, + stdout=self.out, + stderr=self.err, + ) + assert e.exception.args[0] == "Import cancelled." + + self.assertEqual(Book.objects.count(), 0) diff --git a/tests/core/tests/test_command_utils.py b/tests/core/tests/test_command_utils.py new file mode 100644 index 000000000..92e7218fe --- /dev/null +++ b/tests/core/tests/test_command_utils.py @@ -0,0 +1,74 @@ +from core.admin import BookResource +from core.models import Book +from django.core.management import CommandError +from django.test import TestCase + +from import_export.command_utils import ( + get_default_format_names, + get_format_class, + get_resource_class, +) +from import_export.formats import base_formats + + +class GetResourceClassTest(TestCase): + def test_load_by_model(self): + resource_class = get_resource_class("core.Book") + self.assertIsNotNone(resource_class) + self.assertEqual(resource_class.Meta.model, Book) + + def test_load_by_resource(self): + resource_class = get_resource_class("core.admin.BookResource") + self.assertEqual(resource_class, BookResource) + + def test_invalid_name(self): + invalid_name = "invalid.model" + with 
self.assertRaises(CommandError) as context: + get_resource_class(invalid_name) + self.assertEqual( + str(context.exception), + f"Cannot import '{invalid_name}' as a resource class or model.", + ) + + +class GetFormatClassTest(TestCase): + def test_load_by_format_name(self): + format_class = get_format_class("CSV", None) + self.assertIsInstance(format_class, base_formats.CSV) + + def test_load_by_full_format_path(self): + format_class = get_format_class("import_export.formats.base_formats.CSV", None) + self.assertIsInstance(format_class, base_formats.CSV) + + def test_invalid_format_name(self): + invalid_format = "EXCEL" + with self.assertRaises(CommandError) as context: + get_format_class(invalid_format, None) + self.assertIn( + "Cannot import 'EXCEL' or 'import_export.formats.base_formats.EXCEL'", + str(context.exception), + ) + + def test_load_by_file_name_with_known_mime_type(self): + format_class = get_format_class(None, "test.csv") + self.assertIsInstance(format_class, base_formats.CSV) + + def test_load_by_file_name_with_unknown_mime_type(self): + with self.assertRaises(CommandError) as context: + get_format_class(None, "test.unknown") + self.assertIn( + "Cannot determine MIME type for 'test.unknown'", str(context.exception) + ) + + def test_load_by_file_name_with_no_mime_mapping(self): + with self.assertRaises(CommandError) as context: + get_format_class(None, "test.pdf") + self.assertIn( + "Cannot find format for MIME type 'application/pdf'", str(context.exception) + ) + + +class GetDefaultFormatNamesTest(TestCase): + def test_get_default_format_names(self): + format_names = get_default_format_names() + self.assertIsInstance(format_names, str) diff --git a/tests/core/tests/test_declarative.py b/tests/core/tests/test_declarative.py new file mode 100644 index 000000000..d9f59d9c5 --- /dev/null +++ b/tests/core/tests/test_declarative.py @@ -0,0 +1,123 @@ +from django.test import TestCase + +from import_export import fields +from import_export.resources import 
Resource + +from .resources import MyResource + + +class TestInheritance(TestCase): + # Issue 140 Attributes aren't inherited by subclasses + def test_inheritance(self): + class A(MyResource): + inherited = fields.Field() + + class Meta: + import_id_fields = ("email",) + + class B(A): + local = fields.Field() + + class Meta: + export_order = ("email", "extra") + + resource = B() + self.assertIn("name", resource.fields) + self.assertIn("inherited", resource.fields) + self.assertIn("local", resource.fields) + self.assertEqual( + resource.get_export_headers(), + ["email", "extra", "name", "inherited", "local"], + ) + self.assertEqual(resource._meta.import_id_fields, ("email",)) + + def test_inheritance_with_custom_attributes(self): + class A(MyResource): + inherited = fields.Field() + + class Meta: + import_id_fields = ("email",) + custom_attribute = True + + class B(A): + local = fields.Field() + + resource = B() + self.assertEqual(resource._meta.custom_attribute, True) + + +class TestMultiInheritance(TestCase): + def test_meta_inheritance_3_levels(self): + # issue 1363 + class GrandparentResource(Resource): + class Meta: + batch_size = 666 + + class ParentResource(GrandparentResource): + class Meta: + pass + + class ChildResource(ParentResource): + class Meta: + pass + + parent_resource = ParentResource() + child_resource = ChildResource() + self.assertEqual(666, parent_resource._meta.batch_size) + self.assertEqual(666, child_resource._meta.batch_size) + + def test_meta_inheritance_2_levels(self): + class GrandparentResource(Resource): + class Meta: + batch_size = 666 + + class ParentResource(GrandparentResource): + class Meta: + batch_size = 333 + + class ChildResource(ParentResource): + class Meta: + pass + + parent_resource = ParentResource() + child_resource = ChildResource() + self.assertEqual(333, parent_resource._meta.batch_size) + self.assertEqual(333, child_resource._meta.batch_size) + + def test_meta_inheritance_1_level(self): + class 
GrandparentResource(Resource): + class Meta: + batch_size = 666 + + class ParentResource(GrandparentResource): + class Meta: + batch_size = 333 + + class ChildResource(ParentResource): + class Meta: + batch_size = 111 + + parent_resource = ParentResource() + child_resource = ChildResource() + self.assertEqual(333, parent_resource._meta.batch_size) + self.assertEqual(111, child_resource._meta.batch_size) + + def test_meta_inheritance_default(self): + class GrandparentResource(Resource): + class Meta: + pass + + class ParentResource(GrandparentResource): + class Meta: + pass + + class ChildResource(ParentResource): + class Meta: + pass + + grandparent_resource = GrandparentResource() + parent_resource = ParentResource() + child_resource = ChildResource() + self.assertEqual(1000, grandparent_resource._meta.batch_size) + self.assertEqual(1000, parent_resource._meta.batch_size) + self.assertEqual(1000, child_resource._meta.batch_size) diff --git a/tests/core/tests/test_fields.py b/tests/core/tests/test_fields.py index c5f442b9b..a7aea4dc5 100644 --- a/tests/core/tests/test_fields.py +++ b/tests/core/tests/test_fields.py @@ -1,86 +1,196 @@ from datetime import date +from unittest import mock +import tablib +from core.models import Book +from core.tests.resources import BookResource from django.test import TestCase from import_export import fields +from import_export.exceptions import FieldError class Obj: - def __init__(self, name, date=None): self.name = name self.date = date class FieldTest(TestCase): - def setUp(self): - self.field = fields.Field(column_name='name', attribute='name') + self.field = fields.Field(column_name="name", attribute="name") self.row = { - 'name': 'Foo', + "name": "Foo", } - self.obj = Obj(name='Foo', date=date(2012, 8, 13)) + self.obj = Obj(name="Foo", date=date(2012, 8, 13)) def test_clean(self): - self.assertEqual(self.field.clean(self.row), - self.row['name']) + self.assertEqual(self.field.clean(self.row), self.row["name"]) def 
test_clean_raises_KeyError(self): - self.field.column_name = 'x' - with self.assertRaisesRegex(KeyError, "Column 'x' not found in dataset. Available columns are: \\['name'\\]"): + self.field.column_name = "x" + with self.assertRaisesRegex( + KeyError, + "Column 'x' not found in dataset. Available columns are: \\['name'\\]", + ): self.field.clean(self.row) def test_export(self): - self.assertEqual(self.field.export(self.obj), - self.row['name']) + self.assertEqual(self.field.export(self.obj), self.row["name"]) + + def test_export_none(self): + # 1872 + instance = Obj(name=None) + self.assertEqual("", self.field.export(instance)) def test_save(self): - self.row['name'] = 'foo' + self.row["name"] = "foo" self.field.save(self.obj, self.row) - self.assertEqual(self.obj.name, 'foo') + self.assertEqual(self.obj.name, "foo") def test_save_follow(self): class Test: class name: class follow: - me = 'bar' + me = "bar" test = Test() - field = fields.Field(column_name='name', attribute='name__follow__me') - row = {'name': 'foo'} + field = fields.Field(column_name="name", attribute="name__follow__me") + row = {"name": "foo"} field.save(test, row) - self.assertEqual(test.name.follow.me, 'foo') + self.assertEqual(test.name.follow.me, "foo") def test_following_attribute(self): - field = fields.Field(attribute='other_obj__name') + field = fields.Field(attribute="other_obj__name") obj2 = Obj(name="bar") self.obj.other_obj = obj2 self.assertEqual(field.export(self.obj), "bar") def test_default(self): - field = fields.Field(default=1, column_name='name') - self.assertEqual(field.clean({'name': None}), 1) + field = fields.Field(default=1, column_name="name") + self.assertEqual(field.clean({"name": None}), 1) def test_default_falsy_values(self): - field = fields.Field(default=1, column_name='name') - self.assertEqual(field.clean({'name': 0}), 0) + field = fields.Field(default=1, column_name="name") + self.assertEqual(field.clean({"name": 0}), 0) def 
test_default_falsy_values_without_default(self): - field = fields.Field(column_name='name') - self.assertEqual(field.clean({'name': 0}), 0) + field = fields.Field(column_name="name") + self.assertEqual(field.clean({"name": 0}), 0) def test_saves_null_values(self): - field = fields.Field(column_name='name', attribute='name', saves_null_values=False) + field = fields.Field( + column_name="name", attribute="name", saves_null_values=False + ) row = { - 'name': None, + "name": None, } field.save(self.obj, row) - self.assertEqual(self.obj.name, 'Foo') + self.assertEqual(self.obj.name, "Foo") self.field.save(self.obj, row) self.assertIsNone(self.obj.name) def test_repr(self): - self.assertEqual(repr(self.field), '') + self.assertEqual(repr(self.field), "") self.field.column_name = None - self.assertEqual(repr(self.field), '') + self.assertEqual(repr(self.field), "") + + def testget_dehydrate_method_default(self): + field = fields.Field(attribute="foo", column_name="bar") + + # `field_name` is the variable name defined in `Resource` + resource_field_name = "field" + method_name = field.get_dehydrate_method(resource_field_name) + self.assertEqual(f"dehydrate_{resource_field_name}", method_name) + + def testget_dehydrate_method_with_custom_method_name(self): + custom_dehydrate_method = "custom_method_name" + field = fields.Field( + attribute="foo", column_name="bar", dehydrate_method=custom_dehydrate_method + ) + resource_field_name = "field" + method_name = field.get_dehydrate_method(resource_field_name) + self.assertEqual(method_name, custom_dehydrate_method) + + def test_get_dehydrate_method_with_callable(self): + field = fields.Field( + attribute="foo", column_name="bar", dehydrate_method=lambda x: x + ) + resource_field_name = "field" + method = field.get_dehydrate_method(resource_field_name) + self.assertTrue(callable(method)) + + def testget_dehydrate_method_without_params_raises_attribute_error(self): + field = fields.Field(attribute="foo", column_name="bar") + + 
self.assertRaises(FieldError, field.get_dehydrate_method) + + def test_m2m_add_true(self): + m2m_related_manager = mock.Mock(spec=["add", "set", "all"]) + m2m_related_manager.all.return_value = [] + self.obj.aliases = m2m_related_manager + field = fields.Field(column_name="aliases", attribute="aliases", m2m_add=True) + row = { + "aliases": ["Foo", "Bar"], + } + field.save(self.obj, row, is_m2m=True) + + self.assertEqual(m2m_related_manager.add.call_count, 1) + self.assertEqual(m2m_related_manager.set.call_count, 0) + m2m_related_manager.add.assert_called_once_with("Foo", "Bar") + + row = { + "aliases": ["apple"], + } + field.save(self.obj, row, is_m2m=True) + m2m_related_manager.add.assert_called_with("apple") + + def test_m2m_add_False(self): + m2m_related_manager = mock.Mock(spec=["add", "set", "all"]) + self.obj.aliases = m2m_related_manager + field = fields.Field(column_name="aliases", attribute="aliases") + row = { + "aliases": ["Foo", "Bar"], + } + field.save(self.obj, row, is_m2m=True) + + self.assertEqual(m2m_related_manager.add.call_count, 0) + self.assertEqual(m2m_related_manager.set.call_count, 1) + m2m_related_manager.set.assert_called_once_with(["Foo", "Bar"]) + + def test_get_value_with_callable(self): + class CallableValue: + def __call__(self): + return "some val" + + self.obj.name = CallableValue() + val = self.field.get_value(self.obj) + self.assertEqual("some val", val) + + def test_get_value_with_no_attribute(self): + self.field.attribute = None + self.assertIsNone(self.field.get_value(self.obj)) + + def test_import_null_django_CharField_saved_as_empty_string(self): + # issue 1485 + resource = BookResource() + self.assertTrue(resource._meta.model.author_email.field.blank) + self.assertFalse(resource._meta.model.author_email.field.null) + headers = ["id", "author_email"] + row = [1, None] + dataset = tablib.Dataset(row, headers=headers) + resource.import_data(dataset, raise_errors=True) + book = Book.objects.get(id=1) + self.assertEqual("", 
book.author_email) + + def test_import_empty_django_CharField_saved_as_empty_string(self): + resource = BookResource() + self.assertTrue(resource._meta.model.author_email.field.blank) + self.assertFalse(resource._meta.model.author_email.field.null) + headers = ["id", "author_email"] + row = [1, ""] + dataset = tablib.Dataset(row, headers=headers) + resource.import_data(dataset, raise_errors=True) + book = Book.objects.get(id=1) + self.assertEqual("", book.author_email) diff --git a/tests/core/tests/test_forms.py b/tests/core/tests/test_forms.py new file mode 100644 index 000000000..d433f21d4 --- /dev/null +++ b/tests/core/tests/test_forms.py @@ -0,0 +1,268 @@ +import django.forms +from core.models import Author +from django.test import TestCase + +from import_export import forms, resources +from import_export.formats.base_formats import CSV + +from .resources import BookResource, BookResourceWithStoreInstance + + +class MyResource(resources.ModelResource): + class Meta: + name = "My super resource" + + +class FormTest(TestCase): + def test_formbase_init_blank_resources(self): + with self.assertRaises(ValueError): + forms.ImportExportFormBase(["format1"], []) + + def test_formbase_init_one_resource(self): + form = forms.ImportExportFormBase([CSV], [resources.ModelResource]) + self.assertEqual( + form.fields["resource"].choices, + [(0, "ModelResource")], + ) + self.assertEqual(form.initial["resource"], "0") + self.assertIsInstance( + form.fields["resource"].widget, + django.forms.HiddenInput, + ) + + def test_formbase_init_two_resources(self): + form = forms.ImportExportFormBase([CSV], [resources.ModelResource, MyResource]) + self.assertEqual( + form.fields["resource"].choices, + [(0, "ModelResource"), (1, "My super resource")], + ) + self.assertNotIn("resource", form.initial) + self.assertIsInstance( + form.fields["resource"].widget, + django.forms.Select, + ) + + +class ImportFormMediaTest(TestCase): + def test_import_form_media(self): + form = 
forms.ImportForm([CSV], [MyResource]) + media = form.media + self.assertEqual( + media._css, + {}, + ) + self.assertEqual( + media._js, + [ + "admin/js/vendor/jquery/jquery.min.js", + "admin/js/jquery.init.js", + "import_export/guess_format.js", + ], + ) + + def test_import_form_and_custom_widget_media(self): + class TestMediaWidget(django.forms.TextInput): + """Dummy test widget with associated CSS and JS media.""" + + class Media: + css = { + "all": ["test.css"], + } + js = ["test.js"] + + class CustomImportForm(forms.ImportForm): + """Dummy custom import form with a custom widget.""" + + author = django.forms.ModelChoiceField( + queryset=Author.objects.none(), + required=True, + widget=TestMediaWidget, + ) + + form = CustomImportForm([CSV], [MyResource]) + media = form.media + self.assertEqual( + media._css, + {"all": ["test.css"]}, + ) + self.assertEqual( + media._js, + [ + "test.js", + "admin/js/vendor/jquery/jquery.min.js", + "admin/js/jquery.init.js", + "import_export/guess_format.js", + ], + ) + + +class SelectableFieldsExportFormTest(TestCase): + @classmethod + def setUpTestData(cls) -> None: + cls.resources = (BookResource, BookResourceWithStoreInstance) + cls.form = forms.SelectableFieldsExportForm( + formats=(CSV,), + resources=cls.resources, + ) + + def test_create_boolean_fields(self) -> None: + form_fields = self.form.fields + + for resource in self.resources: + fields = resource().get_export_order() + for field in fields: + field_name = forms.SelectableFieldsExportForm.create_boolean_field_name( + resource, field + ) + self.assertIn(field_name, form_fields) + form_field = form_fields[field_name] + self.assertIsInstance(form_field, django.forms.BooleanField) + + def test_form_raises_validation_error_when_no_resource_fields_are_selected( + self, + ) -> None: + data = {"resource": "0", "format": "0", "bookresource_id": False} + form = forms.SelectableFieldsExportForm( + formats=(CSV,), resources=self.resources, data=data + ) + 
self.assertFalse(form.is_valid()) + self.assertTrue("Select at least 1 field for" in form.errors.as_text()) + + def test_remove_unselected_resource_fields_on_validation(self): + data = {"resource": "0", "format": "0"} + + # Add all field values to form data for validation + for resource in self.resources: + for field in resource().get_export_order(): + data[ + forms.SelectableFieldsExportForm.create_boolean_field_name( + resource, field + ) + ] = True + + form = forms.SelectableFieldsExportForm( + formats=(CSV,), resources=self.resources, data=data + ) + + self.assertTrue(form.is_valid()) + + selected_resource = self.resources[0] + selected_resource_fields = selected_resource().get_export_order() + not_selected_resource = self.resources[1] # resource on index 0 was selected + + for field in not_selected_resource().get_export_order(): + # Only assert fields which doesn't exist in selected resource's fields + if field not in selected_resource_fields: + self.assertNotIn(field, form.cleaned_data) + + def test_normalize_resource_field_names(self) -> None: + """ + Field names are combination of resource's name and field name. 
+ After validation, fields that belong to unselected resources are removed + and resource name is removed from field names + """ + + data = {"resource": "0", "format": "0"} + + # Add all field values to form data for validation + for resource in self.resources: + for field in resource().get_export_order(): + data[ + forms.SelectableFieldsExportForm.create_boolean_field_name( + resource, field + ) + ] = "on" + + form = forms.SelectableFieldsExportForm( + formats=(CSV,), resources=self.resources, data=data + ) + self.assertTrue(form.is_valid()) + selected_resource = self.resources[0] + + for field in selected_resource().get_export_order(): + self.assertIn(field, form.cleaned_data) + + def test_get_selected_resource_fields_without_validation_raises_validation_error( + self, + ) -> None: + self.assertRaises( + django.forms.ValidationError, self.form.get_selected_resource_export_fields + ) + + def test_get_field_label(self): + """test SelectableFieldsExportForm._get_field_label""" + form = forms.SelectableFieldsExportForm( + formats=(CSV,), resources=(BookResource,) + ) + resource = BookResource() + self.assertEqual( + form._get_field_label(resource, "bookresource_id"), + "Bookresource Id", + ) + self.assertEqual( + form._get_field_label(resource, "published"), "Published (published_date)" + ) + + def test_get_selected_resrource_fields(self) -> None: + data = {"resource": "0", "format": "0"} + form = forms.SelectableFieldsExportForm( + formats=(CSV,), resources=self.resources, data=data + ) + for resource in self.resources: + for field in resource().get_export_order(): + data[ + forms.SelectableFieldsExportForm.create_boolean_field_name( + resource, field + ) + ] = "on" + + self.assertTrue(form.is_valid()) + selected_resource = self.resources[0]() + + self.assertEqual( + form.get_selected_resource_export_fields(), + list(selected_resource.get_export_order()), + ) + + def test_fields_order(self) -> None: + form = forms.SelectableFieldsExportForm( + formats=(CSV,), 
resources=(BookResource,) + ) + + self.assertEqual( + list(form.fields.keys()), + [ + "resource", + "bookresource_id", + "bookresource_name", + "bookresource_author", + "bookresource_author_email", + "bookresource_published", + "bookresource_published_time", + "bookresource_price", + "bookresource_added", + "bookresource_categories", + "format", + "export_items", + ], + ) + + def test_resource_boolean_field_attributes(self) -> None: + for resource_index, resource in enumerate(self.resources): + resource_fields = resource().get_export_order() + initial_field_checked = False + + for resource_field in resource_fields: + field_name = forms.SelectableFieldsExportForm.create_boolean_field_name( + resource, resource_field + ) + form_field = self.form.fields[field_name] + + if not initial_field_checked: + self.assertTrue(form_field.initial_field) + initial_field_checked = True + + self.assertTrue(form_field.is_selectable_field) + self.assertEqual(form_field.resource_name, resource.__name__) + self.assertEqual(form_field.resource_index, resource_index) + self.assertEqual(form_field.widget.attrs["resource-id"], resource_index) diff --git a/tests/core/tests/test_import_export_tags.py b/tests/core/tests/test_import_export_tags.py new file mode 100644 index 000000000..931152b0d --- /dev/null +++ b/tests/core/tests/test_import_export_tags.py @@ -0,0 +1,12 @@ +from unittest import TestCase + +from import_export.templatetags import import_export_tags + + +class TagsTest(TestCase): + def test_compare_values(self): + target = ( + 'a' + 'b' + ) + self.assertEqual(target, import_export_tags.compare_values("a", "b")) diff --git a/tests/core/tests/test_instance_loaders.py b/tests/core/tests/test_instance_loaders.py index cea250850..01b8ff30a 100644 --- a/tests/core/tests/test_instance_loaders.py +++ b/tests/core/tests/test_instance_loaders.py @@ -6,7 +6,6 @@ class BaseInstanceLoaderTest(TestCase): - def test_get_instance(self): instance_loader = instance_loaders.BaseInstanceLoader(None) 
with self.assertRaises(NotImplementedError): @@ -14,7 +13,6 @@ def test_get_instance(self): class ModelInstanceLoaderTest(TestCase): - def setUp(self): self.resource = resources.modelresource_factory(Book)() @@ -27,22 +25,21 @@ def test_get_instance_returns_None_when_params_is_empty(self): class CachedInstanceLoaderTest(TestCase): - def setUp(self): self.resource = resources.modelresource_factory(Book)() - self.dataset = tablib.Dataset(headers=['id', 'name', 'author_email']) + self.dataset = tablib.Dataset(headers=["id", "name", "author_email"]) self.book = Book.objects.create(name="Some book") self.book2 = Book.objects.create(name="Some other book") - row = [str(self.book.pk), 'Some book', 'test@example.com'] + row = [str(self.book.pk), "Some book", "test@example.com"] self.dataset.append(row) self.instance_loader = instance_loaders.CachedInstanceLoader( - self.resource, self.dataset) + self.resource, self.dataset + ) def test_all_instances(self): self.assertTrue(self.instance_loader.all_instances) self.assertEqual(len(self.instance_loader.all_instances), 1) - self.assertEqual(list(self.instance_loader.all_instances), - [self.book.pk]) + self.assertEqual(list(self.instance_loader.all_instances), [self.book.pk]) def test_get_instance(self): obj = self.instance_loader.get_instance(self.dataset.dict[0]) @@ -56,13 +53,14 @@ class CachedInstanceLoaderWithAbsentImportIdFieldTest(TestCase): def setUp(self): self.resource = resources.modelresource_factory(Book)() - self.dataset = tablib.Dataset(headers=['name', 'author_email']) + self.dataset = tablib.Dataset(headers=["name", "author_email"]) self.book = Book.objects.create(name="Some book") self.book2 = Book.objects.create(name="Some other book") - row = ['Some book', 'test@example.com'] + row = ["Some book", "test@example.com"] self.dataset.append(row) self.instance_loader = instance_loaders.CachedInstanceLoader( - self.resource, self.dataset) + self.resource, self.dataset + ) def test_all_instances(self): 
self.assertEqual(self.instance_loader.all_instances, {}) diff --git a/tests/core/tests/test_invalidrow.py b/tests/core/tests/test_invalidrow.py index d382e825e..ac11bc81c 100644 --- a/tests/core/tests/test_invalidrow.py +++ b/tests/core/tests/test_invalidrow.py @@ -5,24 +5,22 @@ class InvalidRowTest(TestCase): - def setUp(self): - # Create a ValidationError with a mix of field-specific and non-field-specific errors - self.non_field_errors = ValidationError(['Error 1', 'Error 2', 'Error 3']) - self.field_errors = ValidationError({ - 'name': ['Error 4', 'Error 5'], - 'birthday': ['Error 6', 'Error 7'], - }) + # Create a ValidationError with a mix of field-specific + # and non-field-specific errors + self.non_field_errors = ValidationError(["Error 1", "Error 2", "Error 3"]) + self.field_errors = ValidationError( + { + "name": ["Error 4", "Error 5"], + "birthday": ["Error 6", "Error 7"], + } + ) combined_error_dict = self.non_field_errors.update_error_dict( self.field_errors.error_dict.copy() ) e = ValidationError(combined_error_dict) # Create an InvalidRow instance to use in tests - self.obj = InvalidRow( - number=1, - validation_error=e, - values=['ABC', '123'] - ) + self.obj = InvalidRow(number=1, validation_error=e, values=["ABC", "123"]) def test_error_count(self): self.assertEqual(self.obj.error_count, 7) @@ -30,21 +28,21 @@ def test_error_count(self): def test_non_field_specific_errors(self): result = self.obj.non_field_specific_errors self.assertIsInstance(result, list) - self.assertEqual(result, ['Error 1', 'Error 2', 'Error 3']) + self.assertEqual(result, ["Error 1", "Error 2", "Error 3"]) def test_field_specific_errors(self): result = self.obj.field_specific_errors self.assertIsInstance(result, dict) self.assertEqual(len(result), 2) - self.assertEqual(result['name'], ['Error 4', 'Error 5']) - self.assertEqual(result['birthday'], ['Error 6', 'Error 7']) + self.assertEqual(result["name"], ["Error 4", "Error 5"]) + self.assertEqual(result["birthday"], ["Error 
6", "Error 7"]) - def test_creates_error_dict_from_error_list_if_validation_error_only_has_error_list(self): - obj = InvalidRow( - number=1, - validation_error=self.non_field_errors, - values=[] - ) + def test_creates_error_dict_from_error_list_if_validation_error_only_has_error_list( + self, + ): + obj = InvalidRow(number=1, validation_error=self.non_field_errors, values=[]) self.assertIsInstance(obj.error_dict, dict) self.assertIn(NON_FIELD_ERRORS, obj.error_dict) - self.assertEqual(obj.error_dict[NON_FIELD_ERRORS], ['Error 1', 'Error 2', 'Error 3']) + self.assertEqual( + obj.error_dict[NON_FIELD_ERRORS], ["Error 1", "Error 2", "Error 3"] + ) diff --git a/tests/core/tests/test_mixins.py b/tests/core/tests/test_mixins.py index 0562fe221..e4a479720 100644 --- a/tests/core/tests/test_mixins.py +++ b/tests/core/tests/test_mixins.py @@ -1,3 +1,4 @@ +import warnings from unittest import mock from unittest.mock import MagicMock @@ -6,33 +7,38 @@ from django.test.testcases import TestCase from django.urls import reverse -from import_export import formats, forms, mixins +from import_export import admin, formats, forms, mixins, resources +from import_export.resources import modelresource_factory class ExportViewMixinTest(TestCase): class TestExportForm(forms.ExportForm): - cleaned_data = dict() + cleaned_data = {} def setUp(self): - self.url = reverse('export-category') - self.cat1 = Category.objects.create(name='Cat 1') - self.cat2 = Category.objects.create(name='Cat 2') - self.form = ExportViewMixinTest.TestExportForm(formats.base_formats.DEFAULT_FORMATS) - self.form.cleaned_data["file_format"] = "0" + self.url = reverse("export-category") + self.cat1 = Category.objects.create(name="Cat 1") + self.cat2 = Category.objects.create(name="Cat 2") + self.resource = modelresource_factory(Category) + self.form = ExportViewMixinTest.TestExportForm( + formats=formats.base_formats.DEFAULT_FORMATS, + resources=[self.resource], + ) + self.form.cleaned_data["format"] = "0" def 
test_get(self): response = self.client.get(self.url) self.assertContains(response, self.cat1.name, status_code=200) - self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') + self.assertEqual(response["Content-Type"], "text/html; charset=utf-8") def test_post(self): - data = { - 'file_format': '0', - } - response = self.client.post(self.url, data) + data = {"format": "0", "categoryresource_id": True} + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + response = self.client.post(self.url, data) self.assertContains(response, self.cat1.name, status_code=200) self.assertTrue(response.has_header("Content-Disposition")) - self.assertEqual(response['Content-Type'], 'text/csv') + self.assertEqual(response["Content-Type"], "text/csv") def test_get_response_raises_TypeError_when_content_type_kwarg_used(self): """ @@ -40,44 +46,55 @@ def test_get_response_raises_TypeError_when_content_type_kwarg_used(self): """ content_type = "text/csv" - class TestMixin(mixins.ExportViewFormMixin): - def __init__(self): - self.model = MagicMock() - self.request = MagicMock(spec=HttpRequest) - self.model.__name__ = "mockModel" + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + + class TestMixin(mixins.ExportViewFormMixin): + def __init__(self): + self.model = MagicMock() + self.request = MagicMock(spec=HttpRequest) + self.model.__name__ = "mockModel" - def get_queryset(self): - return MagicMock() + def get_queryset(self): + return MagicMock() m = TestMixin() with mock.patch("import_export.mixins.HttpResponse") as mock_http_response: # on first instantiation, raise TypeError, on second, return mock mock_http_response.side_effect = [TypeError(), mock_http_response] m.form_valid(self.form) - self.assertEqual(content_type, mock_http_response.call_args_list[0][1]["content_type"]) - self.assertEqual(content_type, mock_http_response.call_args_list[1][1]["mimetype"]) + self.assertEqual( + 
content_type, mock_http_response.call_args_list[0][1]["content_type"] + ) + self.assertEqual( + content_type, mock_http_response.call_args_list[1][1]["mimetype"] + ) def test_implements_get_filterset(self): """ test that if the class-under-test defines a get_filterset() method, then this is called as required. """ - class TestMixin(mixins.ExportViewFormMixin): - mock_get_filterset_call_count = 0 - mock_get_filterset_class_call_count = 0 - def __init__(self): - self.model = MagicMock() - self.request = MagicMock(spec=HttpRequest) - self.model.__name__ = "mockModel" + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + + class TestMixin(mixins.ExportViewFormMixin): + mock_get_filterset_call_count = 0 + mock_get_filterset_class_call_count = 0 + + def __init__(self): + self.model = MagicMock() + self.request = MagicMock(spec=HttpRequest) + self.model.__name__ = "mockModel" - def get_filterset(self, filterset_class): - self.mock_get_filterset_call_count += 1 - return MagicMock() + def get_filterset(self, filterset_class): + self.mock_get_filterset_call_count += 1 + return MagicMock() - def get_filterset_class(self): - self.mock_get_filterset_class_call_count += 1 - return MagicMock() + def get_filterset_class(self): + self.mock_get_filterset_class_call_count += 1 + return MagicMock() m = TestMixin() res = m.form_valid(self.form) @@ -87,9 +104,8 @@ def get_filterset_class(self): class BaseImportMixinTest(TestCase): - def test_get_import_formats(self): - class Format(object): + class Format: def __init__(self, id, can_import): self.id = id self.val = can_import @@ -105,43 +121,52 @@ class CannotImportFormat(Format): def __init__(self): super().__init__(2, False) - m = mixins.BaseImportMixin() - m.formats = [CanImportFormat, CannotImportFormat] + class TestBaseImportMixin(mixins.BaseImportMixin): + @property + def import_formats(self): + return [CanImportFormat, CannotImportFormat] + + m = TestBaseImportMixin() formats = 
m.get_import_formats() self.assertEqual(1, len(formats)) - self.assertEqual('CanImportFormat', formats[0].__name__) + self.assertEqual("CanImportFormat", formats[0].__name__) + + +class FooResource(resources.Resource): + pass class MixinModelAdminTest(TestCase): """ - Tests for regression where methods in ModelAdmin with BaseImportMixin / BaseExportMixin - do not get called. + Tests for regression where methods in ModelAdmin with + BaseImportMixin / BaseExportMixin do not get called. see #1315. """ + request = MagicMock(spec=HttpRequest) class BaseImportModelAdminTest(mixins.BaseImportMixin): call_count = 0 - def get_resource_class(self): + def get_resource_classes(self, request, **kwargs): self.call_count += 1 - def get_resource_kwargs(self, request, *args, **kwargs): + def get_resource_kwargs(self, request, **kwargs): self.call_count += 1 class BaseExportModelAdminTest(mixins.BaseExportMixin): call_count = 0 - def get_resource_class(self): + def get_resource_classes(self, request, **kwargs): self.call_count += 1 - def get_resource_kwargs(self, request, *args, **kwargs): + def get_export_resource_kwargs(self, request, **kwargs): self.call_count += 1 def test_get_import_resource_class_calls_self_get_resource_class(self): admin = self.BaseImportModelAdminTest() - admin.get_import_resource_class() + admin.get_import_resource_classes(self.request) self.assertEqual(1, admin.call_count) def test_get_import_resource_kwargs_calls_self_get_resource_kwargs(self): @@ -151,7 +176,7 @@ def test_get_import_resource_kwargs_calls_self_get_resource_kwargs(self): def test_get_export_resource_class_calls_self_get_resource_class(self): admin = self.BaseExportModelAdminTest() - admin.get_export_resource_class() + admin.get_export_resource_classes(self.request) self.assertEqual(1, admin.call_count) def test_get_export_resource_kwargs_calls_self_get_resource_kwargs(self): @@ -159,15 +184,157 @@ def test_get_export_resource_kwargs_calls_self_get_resource_kwargs(self): 
admin.get_export_resource_kwargs(self.request) self.assertEqual(1, admin.call_count) + class BaseModelResourceClassTest(mixins.BaseImportMixin, mixins.BaseExportMixin): + resource_class = resources.Resource + export_call_count = 0 + import_call_count = 0 + + def get_export_resource_class(self): + self.export_call_count += 1 + + def get_import_resource_class(self): + self.import_call_count += 1 + + def test_deprecated_resource_class_raises_warning(self): + """Test that the mixin throws error if user didn't + migrate to resource_classes""" + admin = self.BaseModelResourceClassTest() + msg = ( + "The 'get_export_resource_class()' method has been deprecated. " + "Please implement the new 'get_export_resource_classes()' method in " + "core.tests.test_mixins.MixinModelAdminTest.BaseModelResourceClassTest" + ) + with self.assertWarns(DeprecationWarning, msg=msg): + admin.get_export_resource_classes(self.request) + + msg = ( + "The 'get_import_resource_class()' method has been deprecated. " + "Please implement the new 'get_import_resource_classes()' method in " + "core.tests.test_mixins.MixinModelAdminTest.BaseModelResourceClassTest" + ) + with self.assertWarns(DeprecationWarning, msg=msg): + admin.get_import_resource_classes(self.request) + + msg = ( + "The 'resource_class' field has been deprecated. 
" + "Please implement the new 'resource_classes' field in " + "core.tests.test_mixins.MixinModelAdminTest.BaseModelResourceClassTest" + ) + with self.assertWarns(DeprecationWarning, msg=msg): + self.assertEqual( + admin.get_resource_classes(self.request), [resources.Resource] + ) + + self.assertEqual(1, admin.export_call_count) + self.assertEqual(1, admin.import_call_count) + + class BaseModelGetExportResourceClassTest(mixins.BaseExportMixin): + def get_resource_class(self): + pass + + def test_deprecated_get_resource_class_raises_warning(self): + """Test that the mixin throws error if user + didn't migrate to resource_classes""" + admin = self.BaseModelGetExportResourceClassTest() + msg = ( + "The 'get_resource_class()' method has been deprecated. " + "Please implement the new 'get_resource_classes()' method in " + "core.tests.test_mixins.MixinModelAdminTest." + "BaseModelGetExportResourceClassTest" + ) + with self.assertWarns(DeprecationWarning, msg=msg): + admin.get_resource_classes(self.request) + + class BaseModelAdminFaultyResourceClassesTest(mixins.BaseExportMixin): + resource_classes = resources.Resource + + def test_faulty_resource_class_raises_exception(self): + """Test fallback mechanism to old get_export_resource_class() method""" + admin = self.BaseModelAdminFaultyResourceClassesTest() + with self.assertRaisesRegex( + Exception, r"^The resource_classes field type must be subscriptable" + ): + admin.get_export_resource_classes(self.request) + + class BaseModelAdminBothResourceTest(mixins.BaseExportMixin): + call_count = 0 + + resource_class = resources.Resource + resource_classes = [resources.Resource] + + def test_both_resource_class_raises_exception(self): + """Test fallback mechanism to old get_export_resource_class() method""" + admin = self.BaseModelAdminBothResourceTest() + with self.assertRaisesRegex( + Exception, "Only one of 'resource_class' and 'resource_classes' can be set" + ): + admin.get_export_resource_classes(self.request) + + class 
BaseModelExportChooseTest(mixins.BaseExportMixin): + resource_classes = [resources.Resource, FooResource] + + @mock.patch("import_export.admin.SelectableFieldsExportForm") + def test_choose_export_resource_class(self, form): + """Test choose_export_resource_class() method""" + admin = self.BaseModelExportChooseTest() + self.assertEqual( + admin.choose_export_resource_class(form, self.request), resources.Resource + ) + + form.cleaned_data = {"resource": 1} + self.assertEqual( + admin.choose_export_resource_class(form, self.request), FooResource + ) + + class BaseModelImportChooseTest(mixins.BaseImportMixin): + resource_classes = [resources.Resource, FooResource] + + @mock.patch("import_export.admin.ImportForm") + def test_choose_import_resource_class(self, form): + """Test choose_import_resource_class() method""" + admin = self.BaseModelImportChooseTest() + request = MagicMock(spec=HttpRequest) + self.assertEqual( + admin.choose_import_resource_class(form, request), + resources.Resource, + ) + + form.cleaned_data = {"resource": 1} + self.assertEqual(admin.choose_import_resource_class(form, request), FooResource) + + class BaseModelResourceClassOldTest(mixins.BaseImportMixin, mixins.BaseExportMixin): + def get_resource_class(self): + return FooResource + + def test_get_resource_class_old(self): + """ + Test that if only the old get_resource_class() method is defined, + the get_export_resource_classes() and get_import_resource_classes() + still return list of resources. + """ + admin = self.BaseModelResourceClassOldTest() + msg = ( + "The 'get_resource_class()' method has been deprecated. 
" + "Please implement the new 'get_resource_classes()' method in " + "core.tests.test_mixins.MixinModelAdminTest.BaseModelResourceClassOldTest" + ) + with self.assertWarns(DeprecationWarning, msg=msg): + self.assertEqual( + admin.get_export_resource_classes(self.request), [FooResource] + ) + with self.assertWarns(DeprecationWarning, msg=msg): + self.assertEqual( + admin.get_import_resource_classes(self.request), [FooResource] + ) + class BaseExportMixinTest(TestCase): class TestBaseExportMixin(mixins.BaseExportMixin): - def get_export_resource_kwargs(self, request, *args, **kwargs): - self.args = args + def get_export_resource_kwargs(self, request, **kwargs): self.kwargs = kwargs - return super().get_resource_kwargs(request, *args, **kwargs) + return super().get_resource_kwargs(request, **kwargs) - def test_get_data_for_export_sets_args_and_kwargs(self): + def test_get_data_for_export_sets_kwargs(self): """ issue 1268 Ensure that get_export_resource_kwargs() handles the args and kwargs arguments. 
@@ -175,14 +342,12 @@ def test_get_data_for_export_sets_args_and_kwargs(self): request = MagicMock(spec=HttpRequest) m = self.TestBaseExportMixin() m.model = Book - target_args = (1,) target_kwargs = {"a": 1} - m.get_data_for_export(request, Book.objects.none(), *target_args, **target_kwargs) - self.assertEqual(m.args, target_args) + m.get_data_for_export(request, Book.objects.none(), **target_kwargs) self.assertEqual(m.kwargs, target_kwargs) def test_get_export_formats(self): - class Format(object): + class Format: def __init__(self, can_export): self.val = can_export @@ -197,9 +362,47 @@ class CannotExportFormat(Format): def __init__(self): super().__init__(False) - m = mixins.BaseExportMixin() - m.formats = [CanExportFormat, CannotExportFormat] + class TestBaseExportMixin(mixins.BaseExportMixin): + @property + def export_formats(self): + return [CanExportFormat, CannotExportFormat] + + m = TestBaseExportMixin() formats = m.get_export_formats() self.assertEqual(1, len(formats)) - self.assertEqual('CanExportFormat', formats[0].__name__) + self.assertEqual("CanExportFormat", formats[0].__name__) + + +class ExportMixinTest(TestCase): + class TestExportMixin(admin.ExportMixin): + def __init__(self, export_form) -> None: + super().__init__() + self.export_form = export_form + + def get_export_form(self): + return self.export_form + + class TestExportForm(forms.ExportForm): + pass + + def test_get_export_form(self): + m = admin.ExportMixin() + self.assertEqual(admin.ExportMixin.export_form_class, m.get_export_form_class()) + + def test_get_export_form_with_custom_form(self): + m = self.TestExportMixin(self.TestExportForm) + self.assertEqual(self.TestExportForm, m.get_export_form()) + + +class BaseExportImportMixinTest(TestCase): + class TestMixin(mixins.BaseImportExportMixin): + pass + + def test_get_resource_kwargs(self): + mixin_instance = self.TestMixin() + test_kwargs = {"key1": "value1", "key2": "value2"} + mock_request = MagicMock(spec=HttpRequest) + result = 
mixin_instance.get_resource_kwargs(mock_request, **test_kwargs) + + self.assertEqual(result, test_kwargs) diff --git a/tests/core/tests/test_model_resource_fields_generate_widgets.py b/tests/core/tests/test_model_resource_fields_generate_widgets.py new file mode 100644 index 000000000..16edf5e5a --- /dev/null +++ b/tests/core/tests/test_model_resource_fields_generate_widgets.py @@ -0,0 +1,211 @@ +from unittest import TestCase + +import django +from core.models import WithPositiveIntegerFields +from django.contrib.contenttypes import fields as contenttype_fields +from django.contrib.postgres import fields as postgres +from django.contrib.postgres import search as postgres_search +from django.contrib.postgres.fields import ranges as postgres_ranges +from django.db import models +from django.db.models.fields.related import ForeignKey, RelatedField + +from import_export import widgets +from import_export.resources import ModelResource +from import_export.widgets import ForeignKeyWidget + + +class ExampleResource(ModelResource): + class Meta: + model = WithPositiveIntegerFields + + +class TestFieldWidgetMapping(TestCase): + def test_field_has_correct_widget(self): + resource = ExampleResource() + with self.subTest("PositiveBigIntegerField"): + self.assertIsInstance(resource.fields["big"].widget, widgets.IntegerWidget) + with self.subTest("PositiveSmallIntegerField"): + self.assertIsInstance( + resource.fields["small"].widget, + widgets.IntegerWidget, + ) + + def test_all_db_fields_has_widgets(self): + all_django_fields_classes = self._get_all_django_model_field_subclasses() + expected_has_default_widget = self._get_fields_with_expected_default_widget() + expected_not_presented_fields = ( + self._get_expected_not_presented_in_test_field_subclasses() + ) + all_fields = self._get_django_fields_for_check_widget() + + field_instance_by_field_cls = {field.__class__: field for field in all_fields} + + for field_cls, field in field_instance_by_field_cls.items(): + with 
self.subTest(msg=field_cls.__name__): + resource_field = ModelResource.field_from_django_field( + "test", field, False + ) + widget = resource_field.widget + if field_cls in expected_has_default_widget: + self.assertEqual( + widget.__class__, + widgets.Widget, + msg=( + f"{field_cls.__name__} " + "expected default widget " + f"actual {widget.__class__}" + ), + ) + else: + self.assertNotEqual( + widget.__class__, + widgets.Widget, + msg=f"{field_cls.__name__} has default widget class", + ) + + # if in new version django will be added new field subclass + # this subtest should fail + for field_cls in all_django_fields_classes: + if field_cls in expected_not_presented_fields: + continue + with self.subTest(msg=field_cls.__name__): + self.assertIn( + field_cls, + field_instance_by_field_cls, + msg=f"{field_cls.__name__} not presented in test fields", + ) + + def _get_fields_with_expected_default_widget(self): + """ + Returns set of django.db.models.field.Field subclasses + which expected has default Widget in ModelResource + """ + expected_has_default_widget = { + models.BinaryField, + models.FileField, + models.FilePathField, + models.GenericIPAddressField, + models.ImageField, + models.IPAddressField, + models.TextField, + models.UUIDField, + postgres.BigIntegerRangeField, + postgres.CITextField, + postgres.DateRangeField, + postgres.DateTimeRangeField, + postgres.DecimalRangeField, + postgres.HStoreField, + postgres.IntegerRangeField, + postgres.RangeField, + } + return expected_has_default_widget + + def _get_expected_not_presented_in_test_field_subclasses(self): + """ + Return set of django.db.models.field.Field subclasses + which expected NOT presented in this test in + _get_django_fields_for_check_widget + """ + expected_not_presented_fields = { + contenttype_fields.GenericRelation, + models.ForeignObject, + postgres_search.SearchQueryField, + postgres_search.SearchVectorField, + RelatedField, + postgres_ranges.ContinuousRangeField, + 
postgres_search._Float4Field, + } + if django.VERSION >= (5, 0): + expected_not_presented_fields |= {models.GeneratedField} + if django.VERSION >= (5, 1): + expected_not_presented_fields |= {contenttype_fields.GenericForeignKey} + if django.VERSION >= (5, 2): + expected_not_presented_fields |= {models.CompositePrimaryKey} + return expected_not_presented_fields + + def _get_all_django_model_field_subclasses(self): + """ + returns list of classes - all subclasses for django.db.models.field.Field + """ + return self._collect_all_clas_children(models.Field) + + def _collect_all_clas_children(self, clas): + children = [] + for child_clas in clas.__subclasses__(): + children.append(child_clas) + children.extend(self._collect_all_clas_children(child_clas)) + return children + + def _get_django_fields_for_check_widget(self): + """ + Return list of field instances for all checking field classes + """ + fields = [ + models.AutoField(), + models.BigAutoField(), + models.BigIntegerField(), + models.BinaryField(), + models.BooleanField(), + models.CharField(), + models.CommaSeparatedIntegerField(), + models.DateField(), + models.DateTimeField(), + models.DecimalField(), + models.DurationField(), + models.EmailField(), + models.FileField(), + models.FilePathField(), + models.FloatField(), + models.ForeignKey(WithPositiveIntegerFields, on_delete=models.PROTECT), + models.GenericIPAddressField(), + models.ImageField(), + models.IntegerField(), + models.IPAddressField(), + models.JSONField(), + models.ManyToManyField(WithPositiveIntegerFields), + models.NullBooleanField(), + models.OneToOneField(WithPositiveIntegerFields, on_delete=models.PROTECT), + models.OrderWrt(), + models.PositiveBigIntegerField(), + models.PositiveIntegerField(), + models.PositiveSmallIntegerField(), + models.SlugField(), + models.SmallAutoField(), + models.SmallIntegerField(), + models.TextField(), + models.TimeField(), + models.URLField(), + models.UUIDField(), + postgres.ArrayField(models.CharField), + 
postgres.BigIntegerRangeField(), + postgres.CICharField(), + postgres.CIEmailField(), + postgres.CITextField(), + postgres.DateRangeField(), + postgres.DateTimeRangeField(), + postgres.DecimalRangeField(), + postgres.HStoreField(), + postgres.IntegerRangeField(), + postgres.JSONField(), + postgres.RangeField(), + ] + return fields + + def test_custom_fk_field(self): + # issue 1817 - if a 'custom' foreign key field is provided, then this should + # be handled when widgets are defined + class CustomForeignKey(ForeignKey): + def __init__( + self, + to, + on_delete, + **kwargs, + ): + super().__init__(to, on_delete, **kwargs) + + resource_field = ModelResource.field_from_django_field( + "custom_fk", + CustomForeignKey(WithPositiveIntegerFields, on_delete=models.SET_NULL), + False, + ) + self.assertEqual(ForeignKeyWidget, resource_field.widget.__class__) diff --git a/tests/core/tests/test_permissions.py b/tests/core/tests/test_permissions.py index 36500888c..5b1b25100 100644 --- a/tests/core/tests/test_permissions.py +++ b/tests/core/tests/test_permissions.py @@ -1,114 +1,177 @@ import os.path +from core.models import Category +from core.tests.admin_integration.mixins import AdminTestMixin from django.contrib.auth.models import Permission, User +from django.contrib.contenttypes.models import ContentType from django.test.testcases import TestCase from django.test.utils import override_settings +from django.urls import reverse -class ImportExportPermissionTest(TestCase): - +class ImportExportPermissionTest(AdminTestMixin, TestCase): def setUp(self): - user = User.objects.create_user('admin', 'admin@example.com', - 'password') + user = User.objects.create_user("admin", "admin@example.com", "password") user.is_staff = True user.is_superuser = False user.save() self.user = user - self.client.login(username='admin', password='password') + self.client.login(username="admin", password="password") - def set_user_book_model_permission(self, action): - permission = 
Permission.objects.get(codename="%s_book" % action) + def set_user_model_permission(self, action, model_name): + permission = Permission.objects.get(codename=f"{action}_{model_name}") self.user.user_permissions.add(permission) - @override_settings(IMPORT_EXPORT_IMPORT_PERMISSION_CODE='change') + @override_settings(IMPORT_EXPORT_IMPORT_PERMISSION_CODE="change") def test_import(self): # user has no permission to import - response = self.client.get('/admin/core/book/import/') + response = self.client.get(self.book_import_url) self.assertEqual(response.status_code, 403) # POST the import form - input_format = '0' + input_format = "0" filename = os.path.join( - os.path.dirname(__file__), - os.path.pardir, - 'exports', - 'books.csv') + os.path.dirname(__file__), os.path.pardir, "exports", "books.csv" + ) with open(filename, "rb") as f: data = { - 'input_format': input_format, - 'import_file': f, + "format": input_format, + "import_file": f, } - response = self.client.post('/admin/core/book/import/', data) + response = self.client.post(self.book_import_url, data) self.assertEqual(response.status_code, 403) - response = self.client.post('/admin/core/book/process_import/', {}) + response = self.client.post(self.book_process_import_url, {}) self.assertEqual(response.status_code, 403) # user has sufficient permission to import - self.set_user_book_model_permission('change') + self.set_user_model_permission("change", "book") - response = self.client.get('/admin/core/book/import/') + response = self.client.get(self.book_import_url) self.assertEqual(response.status_code, 200) # POST the import form - input_format = '0' + input_format = "0" filename = os.path.join( - os.path.dirname(__file__), - os.path.pardir, - 'exports', - 'books.csv') + os.path.dirname(__file__), os.path.pardir, "exports", "books.csv" + ) with open(filename, "rb") as f: data = { - 'input_format': input_format, - 'import_file': f, + "format": input_format, + "import_file": f, } - response = 
self.client.post('/admin/core/book/import/', data) + response = self.client.post(self.book_import_url, data) self.assertEqual(response.status_code, 200) - confirm_form = response.context['confirm_form'] + confirm_form = response.context["confirm_form"] data = confirm_form.initial - response = self.client.post('/admin/core/book/process_import/', data) + response = self.client.post(self.book_process_import_url, data) self.assertEqual(response.status_code, 302) - - @override_settings(IMPORT_EXPORT_EXPORT_PERMISSION_CODE='change') - def test_import_with_permission_set(self): - response = self.client.get('/admin/core/book/export/') + @override_settings(IMPORT_EXPORT_EXPORT_PERMISSION_CODE="change") + def test_export_with_permission_set(self): + response = self.client.get(self.book_export_url) self.assertEqual(response.status_code, 403) - data = {'file_format': '0'} - response = self.client.post('/admin/core/book/export/', data) + data = {"format": "0"} + response = self.client.post(self.book_export_url, data) self.assertEqual(response.status_code, 403) - self.set_user_book_model_permission('change') - response = self.client.get('/admin/core/book/export/') + self.set_user_model_permission("change", "book") + response = self.client.get(self.book_export_url) self.assertEqual(response.status_code, 200) - data = {'file_format': '0'} - response = self.client.post('/admin/core/book/export/', data) + data = {"format": "0"} + response = self.client.post(self.book_export_url, data) self.assertEqual(response.status_code, 200) - @override_settings(IMPORT_EXPORT_EXPORT_PERMISSION_CODE='add') + @override_settings(IMPORT_EXPORT_EXPORT_PERMISSION_CODE="change") + def test_export_action_with_permission_set(self): + self.cat1 = Category.objects.create(name="Cat 1") + data = { + "action": ["export_admin_action"], + "_selected_action": [str(self.cat1.id)], + } + response = self.client.post(self.category_change_url, data) + self.assertEqual(response.status_code, 403) + + 
self.set_user_model_permission("change", "category") + response = self.client.post(self.category_change_url, data) + self.assertEqual(response.status_code, 200) + + @override_settings(IMPORT_EXPORT_EXPORT_PERMISSION_CODE="add") def test_check_export_button(self): - self.set_user_book_model_permission('change') + self.set_user_model_permission("change", "book") - response = self.client.get('/admin/core/book/') + response = self.client.get(self.core_book_url) widget = "import_link" self.assertIn(widget, response.content.decode()) widget = "export_link" self.assertNotIn(widget, response.content.decode()) - @override_settings(IMPORT_EXPORT_IMPORT_PERMISSION_CODE='add') + @override_settings(IMPORT_EXPORT_IMPORT_PERMISSION_CODE="add") def test_check_import_button(self): - self.set_user_book_model_permission('change') + self.set_user_model_permission("change", "book") - response = self.client.get('/admin/core/book/') + response = self.client.get(self.core_book_url) widget = "import_link" self.assertNotIn(widget, response.content.decode()) widget = "export_link" self.assertIn(widget, response.content.decode()) + + @override_settings(IMPORT_EXPORT_EXPORT_PERMISSION_CODE="export") + def test_export_button_for_export_permission(self): + content_type = ContentType.objects.get_for_model(Category) + Permission.objects.create( + codename="export_category", + name="Can export category", + content_type=content_type, + ) + self.set_user_model_permission("view", "category") + self.cat1 = Category.objects.create(name="Cat 1") + self.change_url = reverse( + "%s:%s_%s_change" + % ( + "admin", + "core", + "category", + ), + args=[self.cat1.pk], + ) + response = self.client.get(self.change_url) + export_btn = ( + '' + ) + self.assertNotIn(export_btn, response.content.decode()) + + # add export permission and the button should be displayed + self.set_user_model_permission("export", "category") + response = self.client.get(self.change_url) + self.assertIn(export_btn, 
response.content.decode()) + + @override_settings(IMPORT_EXPORT_EXPORT_PERMISSION_CODE="export") + def test_action_dropdown_contains_export_action(self): + content_type = ContentType.objects.get_for_model(Category) + Permission.objects.create( + codename="export_category", + name="Can export category", + content_type=content_type, + ) + self.set_user_model_permission("view", "category") + self.cat1 = Category.objects.create(name="Cat 1") + + response = self.client.get(self.category_change_url) + export_option = ( + '' + ) + self.assertNotIn(export_option, response.content.decode()) + + # add export permission and the button should be displayed + self.set_user_model_permission("export", "category") + response = self.client.get(self.category_change_url) + self.assertIn(export_option, response.content.decode()) diff --git a/tests/core/tests/test_resources.py b/tests/core/tests/test_resources.py deleted file mode 100644 index 7e33edb0a..000000000 --- a/tests/core/tests/test_resources.py +++ /dev/null @@ -1,1983 +0,0 @@ -import json -from collections import OrderedDict -from copy import deepcopy -from datetime import date -from decimal import Decimal, InvalidOperation -from unittest import mock, skip, skipIf, skipUnless - -import django -import tablib -from django.conf import settings -from django.contrib.auth.models import User -from django.core.exceptions import ImproperlyConfigured, ValidationError -from django.core.paginator import Paginator -from django.db import IntegrityError -from django.db.models import Count -from django.db.utils import ConnectionDoesNotExist -from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature -from django.utils.encoding import force_str -from django.utils.html import strip_tags - -from import_export import fields, resources, results, widgets -from import_export.instance_loaders import ModelInstanceLoader -from import_export.resources import Diff - -from ..models import ( - Author, - Book, - Category, - Entry, - Person, 
- Profile, - Role, - WithDefault, - WithDynamicDefault, - WithFloatField, -) - -if django.VERSION[0] >= 3: - from django.core.exceptions import FieldDoesNotExist -else: - from django.db.models.fields import FieldDoesNotExist - - -class MyResource(resources.Resource): - name = fields.Field() - email = fields.Field() - extra = fields.Field() - - class Meta: - export_order = ('email', 'name') - - -class ResourceTestCase(TestCase): - - def setUp(self): - self.my_resource = MyResource() - - def test_fields(self): - """Check that fields were determined correctly """ - - # check that our fields were determined - self.assertIn('name', self.my_resource.fields) - - # check that resource instance fields attr isn't link to resource cls - # fields - self.assertFalse( - MyResource.fields is self.my_resource.fields - ) - - # dynamically add new resource field into resource instance - self.my_resource.fields.update( - OrderedDict([ - ('new_field', fields.Field()), - ]) - ) - - # check that new field in resource instance fields - self.assertIn( - 'new_field', - self.my_resource.fields - ) - - # check that new field not in resource cls fields - self.assertNotIn( - 'new_field', - MyResource.fields - ) - - def test_field_column_name(self): - field = self.my_resource.fields['name'] - self.assertIn(field.column_name, 'name') - - def test_meta(self): - self.assertIsInstance(self.my_resource._meta, - resources.ResourceOptions) - - @mock.patch("builtins.dir") - def test_new_handles_null_options(self, mock_dir): - # #1163 - simulates a call to dir() returning additional attributes - mock_dir.return_value = ['attrs'] - class A(MyResource): - pass - - A() - - def test_get_export_order(self): - self.assertEqual(self.my_resource.get_export_headers(), - ['email', 'name', 'extra']) - - # Issue 140 Attributes aren't inherited by subclasses - def test_inheritance(self): - class A(MyResource): - inherited = fields.Field() - - class Meta: - import_id_fields = ('email',) - - class B(A): - local = 
fields.Field() - - class Meta: - export_order = ('email', 'extra') - - resource = B() - self.assertIn('name', resource.fields) - self.assertIn('inherited', resource.fields) - self.assertIn('local', resource.fields) - self.assertEqual(resource.get_export_headers(), - ['email', 'extra', 'name', 'inherited', 'local']) - self.assertEqual(resource._meta.import_id_fields, ('email',)) - - def test_inheritance_with_custom_attributes(self): - class A(MyResource): - inherited = fields.Field() - - class Meta: - import_id_fields = ('email',) - custom_attribute = True - - class B(A): - local = fields.Field() - - resource = B() - self.assertEqual(resource._meta.custom_attribute, True) - - def test_get_use_transactions_defined_in_resource(self): - class A(MyResource): - class Meta: - use_transactions = True - resource = A() - self.assertTrue(resource.get_use_transactions()) - - def test_get_field_name_raises_AttributeError(self): - err = "Field x does not exists in resource" - with self.assertRaisesRegex(AttributeError, err): - self.my_resource.get_field_name('x') - - def test_init_instance_raises_NotImplementedError(self): - with self.assertRaises(NotImplementedError): - self.my_resource.init_instance([]) - - -class AuthorResource(resources.ModelResource): - - books = fields.Field( - column_name='books', - attribute='book_set', - readonly=True, - ) - - class Meta: - model = Author - export_order = ('name', 'books') - - -class BookResource(resources.ModelResource): - published = fields.Field(column_name='published_date') - - class Meta: - model = Book - exclude = ('imported', ) - - -class BookResourceWithLineNumberLogger(BookResource): - def __init__(self, *args, **kwargs): - self.before_lines = [] - self.after_lines = [] - return super().__init__(*args, **kwargs) - - def before_import_row(self,row, row_number=None, **kwargs): - self.before_lines.append(row_number) - - def after_import_row(self, row, row_result, row_number=None, **kwargs): - self.after_lines.append(row_number) - 
- -class CategoryResource(resources.ModelResource): - - class Meta: - model = Category - - -class ProfileResource(resources.ModelResource): - class Meta: - model = Profile - exclude = ('user', ) - - -class WithDefaultResource(resources.ModelResource): - class Meta: - model = WithDefault - fields = ('name',) - - -class HarshRussianWidget(widgets.CharWidget): - def clean(self, value, row=None, *args, **kwargs): - raise ValueError("Ова вриједност је страшна!") - - -class AuthorResourceWithCustomWidget(resources.ModelResource): - - class Meta: - model = Author - - @classmethod - def widget_from_django_field(cls, f, default=widgets.Widget): - if f.name == 'name': - return HarshRussianWidget - result = default - internal_type = f.get_internal_type() if callable(getattr(f, "get_internal_type", None)) else "" - if internal_type in cls.WIDGETS_MAP: - result = cls.WIDGETS_MAP[internal_type] - if isinstance(result, str): - result = getattr(cls, result)(f) - return result - - -class ModelResourceTest(TestCase): - def setUp(self): - self.resource = BookResource() - - self.book = Book.objects.create(name="Some book") - self.dataset = tablib.Dataset(headers=['id', 'name', 'author_email', - 'price']) - row = [self.book.pk, 'Some book', 'test@example.com', "10.25"] - self.dataset.append(row) - - def test_default_instance_loader_class(self): - self.assertIs(self.resource._meta.instance_loader_class, - ModelInstanceLoader) - - def test_fields(self): - fields = self.resource.fields - self.assertIn('id', fields) - self.assertIn('name', fields) - self.assertIn('author_email', fields) - self.assertIn('price', fields) - - def test_fields_foreign_key(self): - fields = self.resource.fields - self.assertIn('author', fields) - widget = fields['author'].widget - self.assertIsInstance(widget, widgets.ForeignKeyWidget) - self.assertEqual(widget.model, Author) - - def test_fields_m2m(self): - fields = self.resource.fields - self.assertIn('categories', fields) - - def test_excluded_fields(self): - 
self.assertNotIn('imported', self.resource.fields) - - def test_init_instance(self): - instance = self.resource.init_instance() - self.assertIsInstance(instance, Book) - - def test_default(self): - self.assertEqual(WithDefaultResource.fields['name'].clean({'name': ''}), 'foo_bar') - - def test_get_instance(self): - instance_loader = self.resource._meta.instance_loader_class( - self.resource) - self.resource._meta.import_id_fields = ['id'] - instance = self.resource.get_instance(instance_loader, - self.dataset.dict[0]) - self.assertEqual(instance, self.book) - - def test_get_instance_import_id_fields(self): - - class BookResource(resources.ModelResource): - name = fields.Field(attribute='name', widget=widgets.CharWidget()) - - class Meta: - model = Book - import_id_fields = ['name'] - - resource = BookResource() - instance_loader = resource._meta.instance_loader_class(resource) - instance = resource.get_instance(instance_loader, self.dataset.dict[0]) - self.assertEqual(instance, self.book) - - def test_get_instance_import_id_fields_with_custom_column_name(self): - class BookResource(resources.ModelResource): - name = fields.Field(attribute='name', column_name='book_name', widget=widgets.CharWidget()) - - class Meta: - model = Book - import_id_fields = ['name'] - - dataset = tablib.Dataset(headers=['id', 'book_name', 'author_email', 'price']) - row = [self.book.pk, 'Some book', 'test@example.com', "10.25"] - dataset.append(row) - - resource = BookResource() - instance_loader = resource._meta.instance_loader_class(resource) - instance = resource.get_instance(instance_loader, dataset.dict[0]) - self.assertEqual(instance, self.book) - - def test_get_instance_usually_defers_to_instance_loader(self): - self.resource._meta.import_id_fields = ['id'] - - instance_loader = self.resource._meta.instance_loader_class( - self.resource) - - with mock.patch.object(instance_loader, 'get_instance') as mocked_method: - row = self.dataset.dict[0] - 
self.resource.get_instance(instance_loader, row) - # instance_loader.get_instance() should have been called - mocked_method.assert_called_once_with(row) - - def test_get_instance_when_id_fields_not_in_dataset(self): - self.resource._meta.import_id_fields = ['id'] - - # construct a dataset with a missing "id" column - dataset = tablib.Dataset(headers=['name', 'author_email', 'price']) - dataset.append(['Some book', 'test@example.com', "10.25"]) - - instance_loader = self.resource._meta.instance_loader_class(self.resource) - - with mock.patch.object(instance_loader, 'get_instance') as mocked_method: - result = self.resource.get_instance(instance_loader, dataset.dict[0]) - # Resource.get_instance() should return None - self.assertIs(result, None) - # instance_loader.get_instance() should NOT have been called - mocked_method.assert_not_called() - - def test_get_export_headers(self): - headers = self.resource.get_export_headers() - self.assertEqual(headers, ['published_date', 'id', 'name', 'author', - 'author_email', 'published_time', 'price', - 'added', - 'categories', ]) - - def test_export(self): - with self.assertNumQueries(2): - dataset = self.resource.export(Book.objects.all()) - self.assertEqual(len(dataset), 1) - - def test_export_iterable(self): - with self.assertNumQueries(2): - dataset = self.resource.export(list(Book.objects.all())) - self.assertEqual(len(dataset), 1) - - def test_export_prefetch_related(self): - with self.assertNumQueries(3): - dataset = self.resource.export(Book.objects.prefetch_related("categories").all()) - self.assertEqual(len(dataset), 1) - - def test_iter_queryset(self): - qs = Book.objects.all() - with mock.patch.object(qs, "iterator") as mocked_method: - list(self.resource.iter_queryset(qs)) - mocked_method.assert_called_once_with(chunk_size=100) - - def test_iter_queryset_prefetch_unordered(self): - qsu = Book.objects.prefetch_related("categories").all() - qso = qsu.order_by('pk').all() - with mock.patch.object(qsu, "order_by") as 
mocked_method: - mocked_method.return_value = qso - list(self.resource.iter_queryset(qsu)) - mocked_method.assert_called_once_with("pk") - - def test_iter_queryset_prefetch_ordered(self): - qs = Book.objects.prefetch_related("categories").order_by('pk').all() - with mock.patch("import_export.resources.Paginator", autospec=True) as p: - p.return_value = Paginator(qs, 100) - list(self.resource.iter_queryset(qs)) - p.assert_called_once_with(qs, 100) - - def test_iter_queryset_prefetch_chunk_size(self): - class B(BookResource): - class Meta: - chunk_size = 1000 - paginator = "import_export.resources.Paginator" - qs = Book.objects.prefetch_related("categories").order_by('pk').all() - with mock.patch(paginator, autospec=True) as mocked_obj: - mocked_obj.return_value = Paginator(qs, 1000) - list(B().iter_queryset(qs)) - mocked_obj.assert_called_once_with(qs, 1000) - - def test_get_diff(self): - diff = Diff(self.resource, self.book, False) - book2 = Book(name="Some other book") - diff.compare_with(self.resource, book2) - html = diff.as_html() - headers = self.resource.get_export_headers() - self.assertEqual(html[headers.index('name')], - 'Some ' - 'other book') - self.assertFalse(html[headers.index('author_email')]) - - @skip("See: https://github.com/django-import-export/django-import-export/issues/311") - def test_get_diff_with_callable_related_manager(self): - resource = AuthorResource() - author = Author(name="Some author") - author.save() - author2 = Author(name="Some author") - self.book.author = author - self.book.save() - diff = Diff(self.resource, author, False) - diff.compare_with(self.resource, author2) - html = diff.as_html() - headers = resource.get_export_headers() - self.assertEqual(html[headers.index('books')], - 'core.Book.None') - - def test_import_data(self): - result = self.resource.import_data(self.dataset, raise_errors=True) - - self.assertFalse(result.has_errors()) - self.assertEqual(len(result.rows), 1) - self.assertTrue(result.rows[0].diff) - 
self.assertEqual(result.rows[0].import_type, - results.RowResult.IMPORT_TYPE_UPDATE) - - instance = Book.objects.get(pk=self.book.pk) - self.assertEqual(instance.author_email, 'test@example.com') - self.assertEqual(instance.price, Decimal("10.25")) - - @mock.patch("import_export.resources.connections") - def test_raised_ImproperlyConfigured_if_use_transactions_set_when_transactions_not_supported(self, mock_db_connections): - class Features(object): - supports_transactions = False - class DummyConnection(object): - features = Features() - - dummy_connection = DummyConnection() - mock_db_connections.__getitem__.return_value = dummy_connection - with self.assertRaises(ImproperlyConfigured): - self.resource.import_data( - self.dataset, - use_transactions=True, - ) - - def test_importing_with_line_number_logging(self): - resource = BookResourceWithLineNumberLogger() - result = resource.import_data(self.dataset, raise_errors=True) - self.assertEqual(resource.before_lines, [1]) - self.assertEqual(resource.after_lines, [1]) - - def test_import_data_raises_field_specific_validation_errors(self): - resource = AuthorResource() - dataset = tablib.Dataset(headers=['id', 'name', 'birthday']) - dataset.append(['', 'A.A.Milne', '1882test-01-18']) - - result = resource.import_data(dataset, raise_errors=False) - - self.assertTrue(result.has_validation_errors()) - self.assertIs(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_INVALID) - self.assertIn('birthday', result.invalid_rows[0].field_specific_errors) - - def test_collect_failed_rows(self): - resource = ProfileResource() - headers = ['id', 'user'] - # 'user' is a required field, the database will raise an error. 
- row = [None, None] - dataset = tablib.Dataset(row, headers=headers) - result = resource.import_data( - dataset, dry_run=True, use_transactions=True, - collect_failed_rows=True, - ) - self.assertEqual( - result.failed_dataset.headers, - ['id', 'user', 'Error'] - ) - self.assertEqual(len(result.failed_dataset), 1) - # We can't check the error message because it's package- and version-dependent - - def test_row_result_raise_errors(self): - resource = ProfileResource() - headers = ['id', 'user'] - # 'user' is a required field, the database will raise an error. - row = [None, None] - dataset = tablib.Dataset(row, headers=headers) - with self.assertRaises(IntegrityError): - resource.import_data( - dataset, dry_run=True, use_transactions=True, - raise_errors=True, - ) - - def test_collect_failed_rows_validation_error(self): - resource = ProfileResource() - row = ['1'] - dataset = tablib.Dataset(row, headers=['id']) - with mock.patch("import_export.resources.Field.save", side_effect=ValidationError("fail!")): - result = resource.import_data( - dataset, dry_run=True, use_transactions=True, - collect_failed_rows=True, - ) - self.assertEqual( - result.failed_dataset.headers, - ['id', 'Error'] - ) - self.assertEqual(1, len(result.failed_dataset), ) - self.assertEqual('1', result.failed_dataset.dict[0]['id']) - self.assertEqual("{'__all__': ['fail!']}", result.failed_dataset.dict[0]['Error']) - - def test_row_result_raise_ValidationError(self): - resource = ProfileResource() - row = ['1'] - dataset = tablib.Dataset(row, headers=['id']) - with mock.patch("import_export.resources.Field.save", side_effect=ValidationError("fail!")): - with self.assertRaisesRegex(ValidationError, "{'__all__': \\['fail!'\\]}") : - resource.import_data( - dataset, dry_run=True, use_transactions=True, - raise_errors=True, - ) - - def test_import_data_handles_widget_valueerrors_with_unicode_messages(self): - resource = AuthorResourceWithCustomWidget() - dataset = tablib.Dataset(headers=['id', 'name', 
'birthday']) - dataset.append(['', 'A.A.Milne', '1882-01-18']) - - result = resource.import_data(dataset, raise_errors=False) - - self.assertTrue(result.has_validation_errors()) - self.assertIs(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_INVALID) - self.assertEqual( - result.invalid_rows[0].field_specific_errors['name'], - ["Ова вриједност је страшна!"] - ) - - def test_model_validation_errors_not_raised_when_clean_model_instances_is_false(self): - - class TestResource(resources.ModelResource): - class Meta: - model = Author - clean_model_instances = False - - resource = TestResource() - dataset = tablib.Dataset(headers=['id', 'name']) - dataset.append(['', '123']) - - result = resource.import_data(dataset, raise_errors=False) - self.assertFalse(result.has_validation_errors()) - self.assertEqual(len(result.invalid_rows), 0) - - def test_model_validation_errors_raised_when_clean_model_instances_is_true(self): - - class TestResource(resources.ModelResource): - class Meta: - model = Author - clean_model_instances = True - export_order = ['id', 'name', 'birthday'] - - # create test dataset - # NOTE: column order is deliberately strange - dataset = tablib.Dataset(headers=['name', 'id']) - dataset.append(['123', '1']) - - # run import_data() - resource = TestResource() - result = resource.import_data(dataset, raise_errors=False) - - # check has_validation_errors() - self.assertTrue(result.has_validation_errors()) - - # check the invalid row itself - invalid_row = result.invalid_rows[0] - self.assertEqual(invalid_row.error_count, 1) - self.assertEqual( - invalid_row.field_specific_errors, - {'name': ["'123' is not a valid value"]} - ) - # diff_header and invalid_row.values should match too - self.assertEqual( - result.diff_headers, - ['id', 'name', 'birthday'] - ) - self.assertEqual( - invalid_row.values, - ('1', '123', '---') - ) - - def test_known_invalid_fields_are_excluded_from_model_instance_cleaning(self): - - # The custom widget on the parent class 
should complain about - # 'name' first, preventing Author.full_clean() from raising the - # error as it does in the previous test - - class TestResource(AuthorResourceWithCustomWidget): - class Meta: - model = Author - clean_model_instances = True - - resource = TestResource() - dataset = tablib.Dataset(headers=['id', 'name']) - dataset.append(['', '123']) - - result = resource.import_data(dataset, raise_errors=False) - self.assertTrue(result.has_validation_errors()) - self.assertEqual(result.invalid_rows[0].error_count, 1) - self.assertEqual( - result.invalid_rows[0].field_specific_errors, - {'name': ["Ова вриједност је страшна!"]} - ) - - def test_import_data_error_saving_model(self): - row = list(self.dataset.pop()) - # set pk to something that would yield error - row[0] = 'foo' - self.dataset.append(row) - result = self.resource.import_data(self.dataset, raise_errors=False) - - self.assertTrue(result.has_errors()) - self.assertTrue(result.rows[0].errors) - actual = result.rows[0].errors[0].error - self.assertIsInstance(actual, (ValueError, InvalidOperation)) - self.assertIn(str(actual), {"could not convert string to float", "[]"}) - - def test_import_data_delete(self): - - class B(BookResource): - delete = fields.Field(widget=widgets.BooleanWidget()) - - def for_delete(self, row, instance): - return self.fields['delete'].clean(row) - - row = [self.book.pk, self.book.name, '1'] - dataset = tablib.Dataset(*[row], headers=['id', 'name', 'delete']) - result = B().import_data(dataset, raise_errors=True) - self.assertFalse(result.has_errors()) - self.assertEqual(result.rows[0].import_type, - results.RowResult.IMPORT_TYPE_DELETE) - self.assertFalse(Book.objects.filter(pk=self.book.pk)) - - def test_save_instance_with_dry_run_flag(self): - class B(BookResource): - def before_save_instance(self, instance, using_transactions, dry_run): - super().before_save_instance(instance, using_transactions, dry_run) - if dry_run: - self.before_save_instance_dry_run = True - else: - 
self.before_save_instance_dry_run = False - def save_instance(self, instance, using_transactions=True, dry_run=False): - super().save_instance(instance, using_transactions, dry_run) - if dry_run: - self.save_instance_dry_run = True - else: - self.save_instance_dry_run = False - def after_save_instance(self, instance, using_transactions, dry_run): - super().after_save_instance(instance, using_transactions, dry_run) - if dry_run: - self.after_save_instance_dry_run = True - else: - self.after_save_instance_dry_run = False - - resource = B() - resource.import_data(self.dataset, dry_run=True, raise_errors=True) - self.assertTrue(resource.before_save_instance_dry_run) - self.assertTrue(resource.save_instance_dry_run) - self.assertTrue(resource.after_save_instance_dry_run) - - resource.import_data(self.dataset, dry_run=False, raise_errors=True) - self.assertFalse(resource.before_save_instance_dry_run) - self.assertFalse(resource.save_instance_dry_run) - self.assertFalse(resource.after_save_instance_dry_run) - - @mock.patch("core.models.Book.save") - def test_save_instance_noop(self, mock_book): - book = Book.objects.first() - self.resource.save_instance(book, using_transactions=False, dry_run=True) - self.assertEqual(0, mock_book.call_count) - - @mock.patch("core.models.Book.save") - def test_delete_instance_noop(self, mock_book): - book = Book.objects.first() - self.resource.delete_instance(book, using_transactions=False, dry_run=True) - self.assertEqual(0, mock_book.call_count) - - def test_delete_instance_with_dry_run_flag(self): - class B(BookResource): - delete = fields.Field(widget=widgets.BooleanWidget()) - - def for_delete(self, row, instance): - return self.fields['delete'].clean(row) - - def before_delete_instance(self, instance, dry_run): - super().before_delete_instance(instance, dry_run) - if dry_run: - self.before_delete_instance_dry_run = True - else: - self.before_delete_instance_dry_run = False - - def delete_instance(self, instance, 
using_transactions=True, dry_run=False): - super().delete_instance(instance, using_transactions, dry_run) - if dry_run: - self.delete_instance_dry_run = True - else: - self.delete_instance_dry_run = False - - def after_delete_instance(self, instance, dry_run): - super().after_delete_instance(instance, dry_run) - if dry_run: - self.after_delete_instance_dry_run = True - else: - self.after_delete_instance_dry_run = False - - resource = B() - row = [self.book.pk, self.book.name, '1'] - dataset = tablib.Dataset(*[row], headers=['id', 'name', 'delete']) - resource.import_data(dataset, dry_run=True, raise_errors=True) - self.assertTrue(resource.before_delete_instance_dry_run) - self.assertTrue(resource.delete_instance_dry_run) - self.assertTrue(resource.after_delete_instance_dry_run) - - resource.import_data(dataset, dry_run=False, raise_errors=True) - self.assertFalse(resource.before_delete_instance_dry_run) - self.assertFalse(resource.delete_instance_dry_run) - self.assertFalse(resource.after_delete_instance_dry_run) - - def test_relationships_fields(self): - - class B(resources.ModelResource): - class Meta: - model = Book - fields = ('author__name',) - - author = Author.objects.create(name="Author") - self.book.author = author - resource = B() - result = resource.fields['author__name'].export(self.book) - self.assertEqual(result, author.name) - - def test_dehydrating_fields(self): - - class B(resources.ModelResource): - full_title = fields.Field(column_name="Full title") - - class Meta: - model = Book - fields = ('author__name', 'full_title') - - def dehydrate_full_title(self, obj): - return '%s by %s' % (obj.name, obj.author.name) - - author = Author.objects.create(name="Author") - self.book.author = author - resource = B() - full_title = resource.export_field(resource.get_fields()[0], self.book) - self.assertEqual(full_title, '%s by %s' % (self.book.name, - self.book.author.name)) - - def test_widget_format_in_fk_field(self): - class B(resources.ModelResource): - - 
class Meta: - model = Book - fields = ('author__birthday',) - widgets = { - 'author__birthday': {'format': '%Y-%m-%d'}, - } - - author = Author.objects.create(name="Author") - self.book.author = author - resource = B() - result = resource.fields['author__birthday'].export(self.book) - self.assertEqual(result, str(date.today())) - - def test_widget_kwargs_for_field(self): - - class B(resources.ModelResource): - - class Meta: - model = Book - fields = ('published',) - widgets = { - 'published': {'format': '%d.%m.%Y'}, - } - - resource = B() - self.book.published = date(2012, 8, 13) - result = resource.fields['published'].export(self.book) - self.assertEqual(result, "13.08.2012") - - def test_foreign_keys_export(self): - author1 = Author.objects.create(name='Foo') - self.book.author = author1 - self.book.save() - - dataset = self.resource.export(Book.objects.all()) - self.assertEqual(dataset.dict[0]['author'], author1.pk) - - def test_foreign_keys_import(self): - author2 = Author.objects.create(name='Bar') - headers = ['id', 'name', 'author'] - row = [None, 'FooBook', author2.pk] - dataset = tablib.Dataset(row, headers=headers) - self.resource.import_data(dataset, raise_errors=True) - - book = Book.objects.get(name='FooBook') - self.assertEqual(book.author, author2) - - def test_m2m_export(self): - cat1 = Category.objects.create(name='Cat 1') - cat2 = Category.objects.create(name='Cat 2') - self.book.categories.add(cat1) - self.book.categories.add(cat2) - - dataset = self.resource.export(Book.objects.all()) - self.assertEqual(dataset.dict[0]['categories'], - '%d,%d' % (cat1.pk, cat2.pk)) - - def test_m2m_import(self): - cat1 = Category.objects.create(name='Cat 1') - headers = ['id', 'name', 'categories'] - row = [None, 'FooBook', str(cat1.pk)] - dataset = tablib.Dataset(row, headers=headers) - self.resource.import_data(dataset, raise_errors=True) - - book = Book.objects.get(name='FooBook') - self.assertIn(cat1, book.categories.all()) - - def 
test_m2m_options_import(self): - cat1 = Category.objects.create(name='Cat 1') - cat2 = Category.objects.create(name='Cat 2') - headers = ['id', 'name', 'categories'] - row = [None, 'FooBook', "Cat 1|Cat 2"] - dataset = tablib.Dataset(row, headers=headers) - - class BookM2MResource(resources.ModelResource): - categories = fields.Field( - attribute='categories', - widget=widgets.ManyToManyWidget(Category, field='name', - separator='|') - ) - - class Meta: - model = Book - - resource = BookM2MResource() - resource.import_data(dataset, raise_errors=True) - book = Book.objects.get(name='FooBook') - self.assertIn(cat1, book.categories.all()) - self.assertIn(cat2, book.categories.all()) - - def test_related_one_to_one(self): - # issue #17 - Exception when attempting access something on the - # related_name - - user = User.objects.create(username='foo') - profile = Profile.objects.create(user=user) - Entry.objects.create(user=user) - Entry.objects.create(user=User.objects.create(username='bar')) - - class EntryResource(resources.ModelResource): - class Meta: - model = Entry - fields = ('user__profile', 'user__profile__is_private') - - resource = EntryResource() - dataset = resource.export(Entry.objects.all()) - self.assertEqual(dataset.dict[0]['user__profile'], profile.pk) - self.assertEqual(dataset.dict[0]['user__profile__is_private'], '1') - self.assertEqual(dataset.dict[1]['user__profile'], '') - self.assertEqual(dataset.dict[1]['user__profile__is_private'], '') - - def test_empty_get_queryset(self): - # issue #25 - Overriding queryset on export() fails when passed - # queryset has zero elements - dataset = self.resource.export(Book.objects.none()) - self.assertEqual(len(dataset), 0) - - def test_import_data_skip_unchanged(self): - def attempted_save(instance, real_dry_run): - self.fail('Resource attempted to save instead of skipping') - - # Make sure we test with ManyToMany related objects - cat1 = Category.objects.create(name='Cat 1') - cat2 = 
Category.objects.create(name='Cat 2') - self.book.categories.add(cat1) - self.book.categories.add(cat2) - dataset = self.resource.export() - - # Create a new resource that attempts to reimport the data currently - # in the database while skipping unchanged rows (i.e. all of them) - resource = deepcopy(self.resource) - resource._meta.skip_unchanged = True - # Fail the test if the resource attempts to save the row - resource.save_instance = attempted_save - result = resource.import_data(dataset, raise_errors=True) - self.assertFalse(result.has_errors()) - self.assertEqual(len(result.rows), len(dataset)) - self.assertTrue(result.rows[0].diff) - self.assertEqual(result.rows[0].import_type, - results.RowResult.IMPORT_TYPE_SKIP) - - # Test that we can suppress reporting of skipped rows - resource._meta.report_skipped = False - result = resource.import_data(dataset, raise_errors=True) - self.assertFalse(result.has_errors()) - self.assertEqual(len(result.rows), 0) - - def test_before_import_access_to_kwargs(self): - class B(BookResource): - def before_import(self, dataset, using_transactions, dry_run, **kwargs): - if 'extra_arg' in kwargs: - dataset.headers[dataset.headers.index('author_email')] = 'old_email' - dataset.insert_col(0, - lambda row: kwargs['extra_arg'], - header='author_email') - - resource = B() - result = resource.import_data(self.dataset, raise_errors=True, - extra_arg='extra@example.com') - self.assertFalse(result.has_errors()) - self.assertEqual(len(result.rows), 1) - instance = Book.objects.get(pk=self.book.pk) - self.assertEqual(instance.author_email, 'extra@example.com') - - def test_before_import_raises_error(self): - class B(BookResource): - def before_import(self, dataset, using_transactions, dry_run, **kwargs): - raise Exception('This is an invalid dataset') - - resource = B() - with self.assertRaises(Exception) as cm: - resource.import_data(self.dataset, raise_errors=True) - self.assertEqual("This is an invalid dataset", cm.exception.args[0]) - - 
def test_after_import_raises_error(self): - class B(BookResource): - def after_import(self, dataset, result, using_transactions, dry_run, **kwargs): - raise Exception('This is an invalid dataset') - - resource = B() - with self.assertRaises(Exception) as cm: - resource.import_data(self.dataset, raise_errors=True) - self.assertEqual("This is an invalid dataset", cm.exception.args[0]) - - def test_link_to_nonexistent_field(self): - with self.assertRaises(FieldDoesNotExist) as cm: - class BrokenBook1(resources.ModelResource): - class Meta: - model = Book - fields = ('nonexistent__invalid',) - self.assertEqual("Book.nonexistent: Book has no field named 'nonexistent'", - cm.exception.args[0]) - - with self.assertRaises(FieldDoesNotExist) as cm: - class BrokenBook2(resources.ModelResource): - class Meta: - model = Book - fields = ('author__nonexistent',) - self.assertEqual("Book.author.nonexistent: Author has no field named " - "'nonexistent'", cm.exception.args[0]) - - def test_link_to_nonrelation_field(self): - with self.assertRaises(KeyError) as cm: - class BrokenBook1(resources.ModelResource): - class Meta: - model = Book - fields = ('published__invalid',) - self.assertEqual("Book.published is not a relation", - cm.exception.args[0]) - - with self.assertRaises(KeyError) as cm: - class BrokenBook2(resources.ModelResource): - class Meta: - model = Book - fields = ('author__name__invalid',) - self.assertEqual("Book.author.name is not a relation", - cm.exception.args[0]) - - def test_override_field_construction_in_resource(self): - class B(resources.ModelResource): - class Meta: - model = Book - fields = ('published',) - - @classmethod - def field_from_django_field(self, field_name, django_field, - readonly): - if field_name == 'published': - return {'sound': 'quack'} - - B() - self.assertEqual({'sound': 'quack'}, B.fields['published']) - - def test_readonly_annotated_field_import_and_export(self): - class B(BookResource): - total_categories = 
fields.Field('total_categories', readonly=True) - - class Meta: - model = Book - skip_unchanged = True - - cat1 = Category.objects.create(name='Cat 1') - self.book.categories.add(cat1) - - resource = B() - - # Verify that the annotated field is correctly exported - dataset = resource.export( - Book.objects.annotate(total_categories=Count('categories'))) - self.assertEqual(int(dataset.dict[0]['total_categories']), 1) - - # Verify that importing the annotated field raises no errors and that - # the rows are skipped - result = resource.import_data(dataset, raise_errors=True) - self.assertFalse(result.has_errors()) - self.assertEqual(len(result.rows), len(dataset)) - self.assertEqual( - result.rows[0].import_type, results.RowResult.IMPORT_TYPE_SKIP) - - def test_follow_relationship_for_modelresource(self): - - class EntryResource(resources.ModelResource): - username = fields.Field(attribute='user__username', readonly=False) - - class Meta: - model = Entry - fields = ('id', ) - - def after_save_instance(self, instance, using_transactions, dry_run): - if not using_transactions and dry_run: - # we don't have transactions and we want to do a dry_run - pass - else: - instance.user.save() - - user = User.objects.create(username='foo') - entry = Entry.objects.create(user=user) - row = [ - entry.pk, - 'bar', - ] - self.dataset = tablib.Dataset(headers=['id', 'username']) - self.dataset.append(row) - result = EntryResource().import_data( - self.dataset, raise_errors=True, dry_run=False) - self.assertFalse(result.has_errors()) - self.assertEqual(User.objects.get(pk=user.pk).username, 'bar') - - def test_import_data_dynamic_default_callable(self): - - class DynamicDefaultResource(resources.ModelResource): - class Meta: - model = WithDynamicDefault - fields = ('id', 'name',) - - self.assertTrue(callable(DynamicDefaultResource.fields['name'].default)) - - resource = DynamicDefaultResource() - dataset = tablib.Dataset(headers=['id', 'name', ]) - dataset.append([1, None]) - 
dataset.append([2, None]) - resource.import_data(dataset, raise_errors=False) - objs = WithDynamicDefault.objects.all() - self.assertNotEqual(objs[0].name, objs[1].name) - - def test_float_field(self): - #433 - class R(resources.ModelResource): - class Meta: - model = WithFloatField - resource = R() - dataset = tablib.Dataset(headers=['id', 'f', ]) - dataset.append([None, None]) - dataset.append([None, '']) - resource.import_data(dataset, raise_errors=True) - self.assertEqual(WithFloatField.objects.all()[0].f, None) - self.assertEqual(WithFloatField.objects.all()[1].f, None) - - def test_get_db_connection_name(self): - class BookResource(resources.ModelResource): - class Meta: - using_db = 'other_db' - - self.assertEqual(BookResource().get_db_connection_name(), 'other_db') - self.assertEqual(CategoryResource().get_db_connection_name(), 'default') - - def test_import_data_raises_field_for_wrong_db(self): - class BookResource(resources.ModelResource): - class Meta: - using_db = 'wrong_db' - - with self.assertRaises(ConnectionDoesNotExist): - BookResource().import_data(self.dataset) - - -class ModelResourceTransactionTest(TransactionTestCase): - @skipUnlessDBFeature('supports_transactions') - def test_m2m_import_with_transactions(self): - resource = BookResource() - cat1 = Category.objects.create(name='Cat 1') - headers = ['id', 'name', 'categories'] - row = [None, 'FooBook', str(cat1.pk)] - dataset = tablib.Dataset(row, headers=headers) - - result = resource.import_data( - dataset, dry_run=True, use_transactions=True - ) - - row_diff = result.rows[0].diff - fields = resource.get_fields() - - id_field = resource.fields['id'] - id_diff = row_diff[fields.index(id_field)] - # id diff should exist because in rollbacked transaction - # FooBook has been saved - self.assertTrue(id_diff) - - category_field = resource.fields['categories'] - categories_diff = row_diff[fields.index(category_field)] - self.assertEqual(strip_tags(categories_diff), force_str(cat1.pk)) - - # check 
that it is really rollbacked - self.assertFalse(Book.objects.filter(name='FooBook')) - - @skipUnlessDBFeature('supports_transactions') - def test_m2m_import_with_transactions_error(self): - resource = ProfileResource() - headers = ['id', 'user'] - # 'user' is a required field, the database will raise an error. - row = [None, None] - dataset = tablib.Dataset(row, headers=headers) - - result = resource.import_data( - dataset, dry_run=True, use_transactions=True - ) - - # Ensure the error raised by the database has been saved. - self.assertTrue(result.has_errors()) - - # Ensure the rollback has worked properly. - self.assertEqual(Profile.objects.count(), 0) - - @skipUnlessDBFeature('supports_transactions') - def test_integrity_error_rollback_on_savem2m(self): - # savepoint_rollback() after an IntegrityError gives - # TransactionManagementError (#399) - class CategoryResourceRaisesIntegrityError(CategoryResource): - def save_m2m(self, instance, *args, **kwargs): - # force raising IntegrityError - Category.objects.create(name=instance.name) - - resource = CategoryResourceRaisesIntegrityError() - headers = ['id', 'name'] - rows = [ - [None, 'foo'], - ] - dataset = tablib.Dataset(*rows, headers=headers) - result = resource.import_data( - dataset, - use_transactions=True, - ) - self.assertTrue(result.has_errors()) - - def test_rollback_on_validation_errors_false(self): - """ Should create only one instance as the second one raises a ``ValidationError`` """ - resource = AuthorResource() - headers = ['id', 'name', 'birthday'] - rows = [ - ['', 'A.A.Milne', ''], - ['', '123', '1992test-01-18'], # raises ValidationError - ] - dataset = tablib.Dataset(*rows, headers=headers) - result = resource.import_data( - dataset, - use_transactions=True, - rollback_on_validation_errors=False, - ) - - # Ensure the validation error raised by the database has been saved. - self.assertTrue(result.has_validation_errors()) - - # Ensure that valid row resulted in an instance created. 
- self.assertEqual(Author.objects.count(), 1) - - def test_rollback_on_validation_errors_true(self): - """ - Should not create any instances as the second one raises a ``ValidationError`` - and ``rollback_on_validation_errors`` flag is set - """ - resource = AuthorResource() - headers = ['id', 'name', 'birthday'] - rows = [ - ['', 'A.A.Milne', ''], - ['', '123', '1992test-01-18'], # raises ValidationError - ] - dataset = tablib.Dataset(*rows, headers=headers) - result = resource.import_data( - dataset, - use_transactions=True, - rollback_on_validation_errors=True, - ) - - # Ensure the validation error raised by the database has been saved. - self.assertTrue(result.has_validation_errors()) - - # Ensure the rollback has worked properly, no instances were created. - self.assertFalse(Author.objects.exists()) - - -class ModelResourceFactoryTest(TestCase): - - def test_create(self): - BookResource = resources.modelresource_factory(Book) - self.assertIn('id', BookResource.fields) - self.assertEqual(BookResource._meta.model, Book) - - -@skipUnless( - 'postgresql' in settings.DATABASES['default']['ENGINE'], - 'Run only against Postgres') -class PostgresTests(TransactionTestCase): - # Make sure to start the sequences back at 1 - reset_sequences = True - - def test_create_object_after_importing_dataset_with_id(self): - dataset = tablib.Dataset(headers=['id', 'name']) - dataset.append([1, 'Some book']) - resource = BookResource() - result = resource.import_data(dataset) - self.assertFalse(result.has_errors()) - try: - Book.objects.create(name='Some other book') - except IntegrityError: - self.fail('IntegrityError was raised.') - -if 'postgresql' in settings.DATABASES['default']['ENGINE']: - from django.contrib.postgres.fields import ArrayField - from django.db import models - try: - from django.db.models import JSONField - except ImportError: - from django.contrib.postgres.fields import JSONField - - - class BookWithChapters(models.Model): - name = models.CharField('Book 
name', max_length=100) - chapters = ArrayField(models.CharField(max_length=100), default=list) - data = JSONField(null=True) - - - class BookWithChaptersResource(resources.ModelResource): - - class Meta: - model = BookWithChapters - fields = ( - 'id', - 'name', - 'chapters', - 'data', - ) - - - class TestExportArrayField(TestCase): - - def test_exports_array_field(self): - dataset_headers = ["id", "name", "chapters"] - chapters = ["Introduction", "Middle Chapter", "Ending"] - dataset_row = ["1", "Book With Chapters", ",".join(chapters)] - dataset = tablib.Dataset(headers=dataset_headers) - dataset.append(dataset_row) - book_with_chapters_resource = resources.modelresource_factory(model=BookWithChapters)() - result = book_with_chapters_resource.import_data(dataset, dry_run=False) - - self.assertFalse(result.has_errors()) - book_with_chapters = list(BookWithChapters.objects.all())[0] - self.assertListEqual(book_with_chapters.chapters, chapters) - - class TestImportArrayField(TestCase): - - def setUp(self): - self.resource = BookWithChaptersResource() - self.chapters = ["Introduction", "Middle Chapter", "Ending"] - self.book = BookWithChapters.objects.create(name='foo') - self.dataset = tablib.Dataset(headers=['id', 'name', 'chapters']) - row = [self.book.id, 'Some book', ",".join(self.chapters)] - self.dataset.append(row) - - def test_import_of_data_with_array(self): - self.assertListEqual(self.book.chapters, []) - result = self.resource.import_data(self.dataset, raise_errors=True) - - self.assertFalse(result.has_errors()) - self.assertEqual(len(result.rows), 1) - - self.book.refresh_from_db() - self.assertEqual(self.book.chapters, self.chapters) - - class TestExportJsonField(TestCase): - - def setUp(self): - self.json_data = {"some_key": "some_value"} - self.book = BookWithChapters.objects.create(name='foo', data=self.json_data) - - def test_export_field_with_appropriate_format(self): - resource = resources.modelresource_factory(model=BookWithChapters)() - result = 
resource.export(BookWithChapters.objects.all()) - - assert result[0][3] == json.dumps(self.json_data) - - - class TestImportJsonField(TestCase): - - def setUp(self): - self.resource = BookWithChaptersResource() - self.data = {"some_key": "some_value"} - self.json_data = json.dumps(self.data) - self.book = BookWithChapters.objects.create(name='foo') - self.dataset = tablib.Dataset(headers=['id', 'name', 'data']) - row = [self.book.id, 'Some book', self.json_data] - self.dataset.append(row) - - def test_sets_json_data_when_model_field_is_empty(self): - self.assertIsNone(self.book.data) - result = self.resource.import_data(self.dataset, raise_errors=True) - - self.assertFalse(result.has_errors()) - self.assertEqual(len(result.rows), 1) - - self.book.refresh_from_db() - self.assertEqual(self.book.data, self.data) - - -class ForeignKeyWidgetFollowRelationship(TestCase): - def setUp(self): - self.user = User.objects.create(username='foo') - self.role = Role.objects.create(user=self.user) - self.person = Person.objects.create(role=self.role) - - def test_export(self): - class MyPersonResource(resources.ModelResource): - role = fields.Field( - column_name='role', - attribute='role', - widget=widgets.ForeignKeyWidget(Role, field='user__username') - ) - - class Meta: - model = Person - fields = ['id', 'role'] - - resource = MyPersonResource() - dataset = resource.export(Person.objects.all()) - self.assertEqual(len(dataset), 1) - self.assertEqual(dataset[0][0], 'foo') - - self.role.user = None - self.role.save() - - resource = MyPersonResource() - dataset = resource.export(Person.objects.all()) - self.assertEqual(len(dataset), 1) - self.assertEqual(dataset[0][0], None) - - -class ManyRelatedManagerDiffTest(TestCase): - fixtures = ["category", "book"] - - def setUp(self): - pass - - def test_related_manager_diff(self): - dataset_headers = ["id", "name", "categories"] - dataset_row = ["1", "Test Book", "1"] - original_dataset = tablib.Dataset(headers=dataset_headers) - 
original_dataset.append(dataset_row) - dataset_row[2] = "2" - changed_dataset = tablib.Dataset(headers=dataset_headers) - changed_dataset.append(dataset_row) - - book_resource = BookResource() - export_headers = book_resource.get_export_headers() - - add_result = book_resource.import_data(original_dataset, dry_run=False) - expected_value = '1' - self.check_value(add_result, export_headers, expected_value) - change_result = book_resource.import_data(changed_dataset, dry_run=False) - expected_value = '12' - self.check_value(change_result, export_headers, expected_value) - - def check_value(self, result, export_headers, expected_value): - self.assertEqual(len(result.rows), 1) - diff = result.rows[0].diff - self.assertEqual(diff[export_headers.index("categories")], - expected_value) - - -@mock.patch("import_export.resources.Diff", spec=True) -class SkipDiffTest(TestCase): - """ - Tests that the meta attribute 'skip_diff' means that no diff operations are called. - 'copy.deepcopy' cannot be patched at class level because it causes interferes with - ``resources.Resource.__init__()``. 
- """ - def setUp(self): - class _BookResource(resources.ModelResource): - - class Meta: - model = Book - skip_diff = True - - self.resource = _BookResource() - self.dataset = tablib.Dataset(headers=['id', 'name', 'birthday']) - self.dataset.append(['', 'A.A.Milne', '1882test-01-18']) - - def test_skip_diff(self, mock_diff): - with mock.patch("import_export.resources.deepcopy") as mock_deep_copy: - self.resource.import_data(self.dataset) - mock_diff.return_value.compare_with.assert_not_called() - mock_diff.return_value.as_html.assert_not_called() - mock_deep_copy.assert_not_called() - - def test_skip_diff_for_delete_new_resource(self, mock_diff): - class BookResource(resources.ModelResource): - - class Meta: - model = Book - skip_diff = True - - def for_delete(self, row, instance): - return True - - resource = BookResource() - with mock.patch("import_export.resources.deepcopy") as mock_deep_copy: - resource.import_data(self.dataset) - mock_diff.return_value.compare_with.assert_not_called() - mock_diff.return_value.as_html.assert_not_called() - mock_deep_copy.assert_not_called() - - def test_skip_diff_for_delete_existing_resource(self, mock_diff): - book = Book.objects.create() - class BookResource(resources.ModelResource): - - class Meta: - model = Book - skip_diff = True - - def get_or_init_instance(self, instance_loader, row): - return book, False - - def for_delete(self, row, instance): - return True - - resource = BookResource() - - with mock.patch("import_export.resources.deepcopy") as mock_deep_copy: - resource.import_data(self.dataset, dry_run=True) - mock_diff.return_value.compare_with.assert_not_called() - mock_diff.return_value.as_html.assert_not_called() - mock_deep_copy.assert_not_called() - - def test_skip_diff_for_delete_skip_row_not_enabled_new_object(self, mock_diff): - class BookResource(resources.ModelResource): - - class Meta: - model = Book - skip_diff = False - - def for_delete(self, row, instance): - return True - - resource = BookResource() - 
- with mock.patch("import_export.resources.deepcopy") as mock_deep_copy: - resource.import_data(self.dataset, dry_run=True) - self.assertEqual(1, mock_diff.return_value.compare_with.call_count) - self.assertEqual(1, mock_deep_copy.call_count) - - def test_skip_row_returns_false_when_skip_diff_is_true(self, mock_diff): - class BookResource(resources.ModelResource): - - class Meta: - model = Book - skip_unchanged = True - skip_diff = True - - resource = BookResource() - - with mock.patch('import_export.resources.Resource.get_import_fields') as mock_get_import_fields: - resource.import_data(self.dataset, dry_run=True) - self.assertEqual(2, mock_get_import_fields.call_count) - - -class SkipHtmlDiffTest(TestCase): - - def test_skip_html_diff(self): - class BookResource(resources.ModelResource): - - class Meta: - model = Book - skip_html_diff = True - - resource = BookResource() - self.dataset = tablib.Dataset(headers=['id', 'name', 'birthday']) - self.dataset.append(['', 'A.A.Milne', '1882test-01-18']) - - with mock.patch('import_export.resources.Diff.as_html') as mock_as_html: - resource.import_data(self.dataset, dry_run=True) - mock_as_html.assert_not_called() - - -class BulkTest(TestCase): - - def setUp(self): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - - self.resource = _BookResource() - rows = [('book_name',)] * 10 - self.dataset = tablib.Dataset(*rows, headers=['name']) - - def init_update_test_data(self): - [Book.objects.create(name='book_name') for _ in range(10)] - self.assertEqual(10, Book.objects.count()) - rows = Book.objects.all().values_list('id', 'name') - updated_rows = [(r[0], 'UPDATED') for r in rows] - self.dataset = tablib.Dataset(*updated_rows, headers=['id', 'name']) - - -class BulkCreateTest(BulkTest): - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_does_not_call_object_save(self, mock_bulk_create): - with mock.patch('core.models.Book.save') as mock_obj_save: - 
self.resource.import_data(self.dataset) - mock_obj_save.assert_not_called() - mock_bulk_create.assert_called_with(mock.ANY, batch_size=None) - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_batch_size_of_5(self, mock_bulk_create): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = 5 - - resource = _BookResource() - result = resource.import_data(self.dataset) - self.assertEqual(2, mock_bulk_create.call_count) - mock_bulk_create.assert_called_with(mock.ANY, batch_size=5) - self.assertEqual(10, result.total_rows) - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_no_batch_size(self, mock_bulk_create): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = None - - resource = _BookResource() - result = resource.import_data(self.dataset) - self.assertEqual(1, mock_bulk_create.call_count) - mock_bulk_create.assert_called_with(mock.ANY, batch_size=None) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["new"]) - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_called_dry_run(self, mock_bulk_create): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = None - - resource = _BookResource() - result = resource.import_data(self.dataset, dry_run=True) - self.assertEqual(1, mock_bulk_create.call_count) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["new"]) - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_not_called_when_not_using_transactions(self, mock_bulk_create): - class _BookResource(resources.ModelResource): - - def import_data(self, dataset, dry_run=False, raise_errors=False, - use_transactions=None, collect_failed_rows=False, **kwargs): - # override so that we can enforce not using_transactions - using_transactions = False - 
return self.import_data_inner(dataset, dry_run, raise_errors, using_transactions, - collect_failed_rows, **kwargs) - - class Meta: - model = Book - use_bulk = True - - resource = _BookResource() - resource.import_data(self.dataset, dry_run=True) - mock_bulk_create.assert_not_called() - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_batch_size_of_4(self, mock_bulk_create): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = 4 - - resource = _BookResource() - result = resource.import_data(self.dataset) - self.assertEqual(3, mock_bulk_create.call_count) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["new"]) - - def test_no_changes_for_errors_if_use_transactions_enabled(self): - with mock.patch('import_export.results.Result.has_errors') as mock_has_errors: - mock_has_errors.return_val = True - self.resource.import_data(self.dataset) - self.assertEqual(0, Book.objects.count()) - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_use_bulk_disabled(self, mock_bulk_create): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = False - - resource = _BookResource() - result = resource.import_data(self.dataset) - mock_bulk_create.assert_not_called() - self.assertEqual(10, Book.objects.count()) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["new"]) - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_bad_batch_size_value(self, mock_bulk_create): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = 'a' - - resource = _BookResource() - with self.assertRaises(ValueError): - resource.import_data(self.dataset) - mock_bulk_create.assert_not_called() - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_negative_batch_size_value(self, mock_bulk_create): - class 
_BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = -1 - - resource = _BookResource() - with self.assertRaises(ValueError): - resource.import_data(self.dataset) - mock_bulk_create.assert_not_called() - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_oversized_batch_size_value(self, mock_bulk_create): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = 100 - - resource = _BookResource() - result = resource.import_data(self.dataset) - self.assertEqual(1, mock_bulk_create.call_count) - mock_bulk_create.assert_called_with(mock.ANY, batch_size=None) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["new"]) - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_logs_exception(self, mock_bulk_create): - e = ValidationError("invalid field") - mock_bulk_create.side_effect = e - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = 100 - resource = _BookResource() - with mock.patch("logging.Logger.exception") as mock_exception: - resource.import_data(self.dataset) - mock_exception.assert_called_with(e) - self.assertEqual(1, mock_exception.call_count) - - @mock.patch('core.models.Book.objects.bulk_create') - def test_bulk_create_raises_exception(self, mock_bulk_create): - mock_bulk_create.side_effect = ValidationError("invalid field") - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = 100 - resource = _BookResource() - with self.assertRaises(ValidationError): - resource.import_data(self.dataset, raise_errors=True) - - def test_m2m_not_called_for_bulk(self): - mock_m2m_widget = mock.Mock(spec=widgets.ManyToManyWidget) - class BookM2MResource(resources.ModelResource): - categories = fields.Field( - attribute='categories', - widget=mock_m2m_widget - ) - class Meta: - model = Book - 
use_bulk = True - - resource = BookM2MResource() - self.dataset.append_col(["Cat 1|Cat 2"] * 10, header="categories") - resource.import_data(self.dataset, raise_errors=True) - mock_m2m_widget.assert_not_called() - - def test_force_init_instance(self): - class _BookResource(resources.ModelResource): - def get_instance(self, instance_loader, row): - raise AssertionError("should not be called") - - class Meta: - model = Book - force_init_instance = True - - resource = _BookResource() - self.assertIsNotNone(resource.get_or_init_instance(ModelInstanceLoader(resource), self.dataset[0])) - - -@skipIf(django.VERSION[0] == 2 and django.VERSION[1] < 2, "bulk_update not supported in this version of django") -class BulkUpdateTest(BulkTest): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - fields = ('id', 'name') - import_id_fields = ('id',) - - def setUp(self): - super().setUp() - self.init_update_test_data() - self.resource = self._BookResource() - - def test_bulk_update(self): - result = self.resource.import_data(self.dataset) - [self.assertEqual('UPDATED', b.name) for b in Book.objects.all()] - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["update"]) - - @mock.patch('core.models.Book.objects.bulk_update') - def test_bulk_update_batch_size_of_4(self, mock_bulk_update): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = 4 - - resource = _BookResource() - result = resource.import_data(self.dataset) - self.assertEqual(3, mock_bulk_update.call_count) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["update"]) - - @mock.patch('core.models.Book.objects.bulk_update') - def test_bulk_update_batch_size_of_5(self, mock_bulk_update): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = 5 - - resource = _BookResource() - result = resource.import_data(self.dataset) - 
self.assertEqual(2, mock_bulk_update.call_count) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["update"]) - - @mock.patch('core.models.Book.objects.bulk_update') - def test_bulk_update_no_batch_size(self, mock_bulk_update): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - batch_size = None - - resource = _BookResource() - result = resource.import_data(self.dataset) - self.assertEqual(1, mock_bulk_update.call_count) - mock_bulk_update.assert_called_with(mock.ANY, mock.ANY, batch_size=None) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["update"]) - - @mock.patch('core.models.Book.objects.bulk_update') - def test_bulk_update_not_called_when_not_using_transactions(self, mock_bulk_update): - class _BookResource(resources.ModelResource): - - def import_data(self, dataset, dry_run=False, raise_errors=False, - use_transactions=None, collect_failed_rows=False, **kwargs): - # override so that we can enforce not using_transactions - using_transactions = False - return self.import_data_inner(dataset, dry_run, raise_errors, using_transactions, - collect_failed_rows, **kwargs) - - class Meta: - model = Book - use_bulk = True - - resource = _BookResource() - resource.import_data(self.dataset, dry_run=True) - mock_bulk_update.assert_not_called() - - @mock.patch('core.models.Book.objects.bulk_update') - def test_bulk_update_called_for_dry_run(self, mock_bulk_update): - self.resource.import_data(self.dataset, dry_run=True) - self.assertEqual(1, mock_bulk_update.call_count) - - @mock.patch('core.models.Book.objects.bulk_update') - def test_bulk_not_called_when_use_bulk_disabled(self, mock_bulk_update): - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = False - - resource = _BookResource() - result = resource.import_data(self.dataset) - self.assertEqual(10, Book.objects.count()) - self.assertEqual(10, result.total_rows) - 
self.assertEqual(10, result.totals["update"]) - mock_bulk_update.assert_not_called() - - @mock.patch('core.models.Book.objects.bulk_update') - def test_bulk_update_logs_exception(self, mock_bulk_update): - e = ValidationError("invalid field") - mock_bulk_update.side_effect = e - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - resource = _BookResource() - with mock.patch("logging.Logger.exception") as mock_exception: - resource.import_data(self.dataset) - mock_exception.assert_called_with(e) - self.assertEqual(1, mock_exception.call_count) - - @mock.patch('core.models.Book.objects.bulk_update') - def test_bulk_update_raises_exception(self, mock_bulk_update): - e = ValidationError("invalid field") - mock_bulk_update.side_effect = e - class _BookResource(resources.ModelResource): - class Meta: - model = Book - use_bulk = True - resource = _BookResource() - with self.assertRaises(ValidationError) as raised_exc: - resource.import_data(self.dataset, raise_errors=True) - self.assertEqual(e, raised_exc) - - -class BulkDeleteTest(BulkTest): - class DeleteBookResource(resources.ModelResource): - def for_delete(self, row, instance): - return True - - class Meta: - model = Book - use_bulk = True - - def setUp(self): - super().setUp() - self.resource = self.DeleteBookResource() - self.init_update_test_data() - - @mock.patch("core.models.Book.delete") - def test_bulk_delete_use_bulk_is_false(self, mock_obj_delete): - class _BookResource(self.DeleteBookResource): - class Meta: - model = Book - use_bulk = False - - self.resource = _BookResource() - self.resource.import_data(self.dataset) - self.assertEqual(10, mock_obj_delete.call_count) - - @mock.patch("core.models.Book.objects") - def test_bulk_delete_batch_size_of_4(self, mock_obj_manager): - class _BookResource(self.DeleteBookResource): - class Meta: - model = Book - use_bulk = True - batch_size = 4 - - self.resource = _BookResource() - result = 
self.resource.import_data(self.dataset) - self.assertEqual(3, mock_obj_manager.filter.return_value.delete.call_count) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["delete"]) - - @mock.patch("core.models.Book.objects") - def test_bulk_delete_batch_size_of_5(self, mock_obj_manager): - class _BookResource(self.DeleteBookResource): - class Meta: - model = Book - use_bulk = True - batch_size = 5 - - self.resource = _BookResource() - result = self.resource.import_data(self.dataset) - self.assertEqual(2, mock_obj_manager.filter.return_value.delete.call_count) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["delete"]) - - @mock.patch("core.models.Book.objects") - def test_bulk_delete_batch_size_is_none(self, mock_obj_manager): - class _BookResource(self.DeleteBookResource): - class Meta: - model = Book - use_bulk = True - batch_size = None - - self.resource = _BookResource() - result = self.resource.import_data(self.dataset) - self.assertEqual(1, mock_obj_manager.filter.return_value.delete.call_count) - self.assertEqual(10, result.total_rows) - self.assertEqual(10, result.totals["delete"]) - - @mock.patch("core.models.Book.objects") - def test_bulk_delete_not_called_when_not_using_transactions(self, mock_obj_manager): - class _BookResource(self.DeleteBookResource): - def import_data(self, dataset, dry_run=False, raise_errors=False, - use_transactions=None, collect_failed_rows=False, **kwargs): - # override so that we can enforce not using_transactions - using_transactions = False - return self.import_data_inner(dataset, dry_run, raise_errors, using_transactions, - collect_failed_rows, **kwargs) - - class Meta: - model = Book - use_bulk = True - - resource = _BookResource() - resource.import_data(self.dataset, dry_run=True) - self.assertEqual(0, mock_obj_manager.filter.return_value.delete.call_count) - - @mock.patch("core.models.Book.objects") - def test_bulk_delete_called_for_dry_run(self, mock_obj_manager): 
- self.resource.import_data(self.dataset, dry_run=True) - self.assertEqual(1, mock_obj_manager.filter.return_value.delete.call_count) - - @mock.patch("core.models.Book.objects") - def test_bulk_delete_logs_exception(self, mock_obj_manager): - e = Exception("invalid") - mock_obj_manager.filter.return_value.delete.side_effect = e - class _BookResource(self.DeleteBookResource): - class Meta: - model = Book - use_bulk = True - resource = _BookResource() - with mock.patch("logging.Logger.exception") as mock_exception: - resource.import_data(self.dataset) - mock_exception.assert_called_with(e) - self.assertEqual(1, mock_exception.call_count) - - @mock.patch("core.models.Book.objects") - def test_bulk_delete_raises_exception(self, mock_obj_manager): - e = Exception("invalid") - mock_obj_manager.filter.return_value.delete.side_effect = e - class _BookResource(self.DeleteBookResource): - class Meta: - model = Book - use_bulk = True - resource = _BookResource() - with self.assertRaises(Exception) as raised_exc: - resource.import_data(self.dataset, raise_errors=True) - self.assertEqual(e, raised_exc) diff --git a/tests/core/tests/test_resources/__init__.py b/tests/core/tests/test_resources/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/tests/test_resources/test_bulk_operations.py b/tests/core/tests/test_resources/test_bulk_operations.py new file mode 100644 index 000000000..7b2c1159f --- /dev/null +++ b/tests/core/tests/test_resources/test_bulk_operations.py @@ -0,0 +1,585 @@ +from unittest import mock + +import tablib +from core.models import Book, UUIDBook +from django.core.exceptions import ValidationError +from django.test import TestCase + +from import_export import exceptions, fields, resources, widgets +from import_export.instance_loaders import ModelInstanceLoader + + +class BulkTest(TestCase): + def setUp(self): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + + self.resource = 
_BookResource() + rows = [(i + 1, "book_name") for i in range(10)] + self.dataset = tablib.Dataset(*rows, headers=["id", "name"]) + + def init_update_test_data(self, model=Book): + [model.objects.create(name="book_name") for _ in range(10)] + self.assertEqual(10, model.objects.count()) + rows = model.objects.all().values_list("id", "name") + updated_rows = [(r[0], "UPDATED") for r in rows] + self.dataset = tablib.Dataset(*updated_rows, headers=["id", "name"]) + + +class BulkCreateTest(BulkTest): + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_does_not_call_object_save(self, mock_bulk_create): + with mock.patch("core.models.Book.save") as mock_obj_save: + self.resource.import_data(self.dataset) + mock_obj_save.assert_not_called() + mock_bulk_create.assert_called_with(mock.ANY, batch_size=None) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_batch_size_of_5(self, mock_bulk_create): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = 5 + + resource = _BookResource() + result = resource.import_data(self.dataset) + self.assertEqual(2, mock_bulk_create.call_count) + mock_bulk_create.assert_called_with(mock.ANY, batch_size=5) + self.assertEqual(10, result.total_rows) + + @mock.patch("core.models.UUIDBook.objects.bulk_create") + def test_bulk_create_uuid_model(self, mock_bulk_create): + """Test create of a Model which defines uuid not pk (issue #1274)""" + + class _UUIDBookResource(resources.ModelResource): + class Meta: + model = UUIDBook + use_bulk = True + batch_size = 5 + fields = ( + "id", + "name", + ) + + resource = _UUIDBookResource() + result = resource.import_data(self.dataset) + self.assertEqual(2, mock_bulk_create.call_count) + mock_bulk_create.assert_called_with(mock.ANY, batch_size=5) + self.assertEqual(10, result.total_rows) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_no_batch_size(self, mock_bulk_create): + 
class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = None + + resource = _BookResource() + result = resource.import_data(self.dataset) + self.assertEqual(1, mock_bulk_create.call_count) + mock_bulk_create.assert_called_with(mock.ANY, batch_size=None) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["new"]) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_called_dry_run(self, mock_bulk_create): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = None + + resource = _BookResource() + result = resource.import_data(self.dataset, dry_run=True) + self.assertEqual(1, mock_bulk_create.call_count) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["new"]) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_not_called_when_not_using_transactions(self, mock_bulk_create): + class _BookResource(resources.ModelResource): + def import_data( + self, + dataset, + dry_run=False, + raise_errors=False, + use_transactions=None, + collect_failed_rows=False, + **kwargs, + ): + # override so that we can enforce not using_transactions + using_transactions = False + return self.import_data_inner( + dataset, + dry_run, + raise_errors, + using_transactions, + collect_failed_rows, + **kwargs, + ) + + class Meta: + model = Book + use_bulk = True + + resource = _BookResource() + resource.import_data(self.dataset, dry_run=True) + mock_bulk_create.assert_not_called() + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_batch_size_of_4(self, mock_bulk_create): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = 4 + + resource = _BookResource() + result = resource.import_data(self.dataset) + self.assertEqual(3, mock_bulk_create.call_count) + self.assertEqual(10, result.total_rows) + 
self.assertEqual(10, result.totals["new"]) + + def test_no_changes_for_errors_if_use_transactions_enabled(self): + with mock.patch("import_export.results.Result.has_errors") as mock_has_errors: + mock_has_errors.return_val = True + self.resource.import_data(self.dataset) + self.assertEqual(0, Book.objects.count()) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_use_bulk_disabled(self, mock_bulk_create): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = False + + resource = _BookResource() + result = resource.import_data(self.dataset) + mock_bulk_create.assert_not_called() + self.assertEqual(10, Book.objects.count()) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["new"]) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_bad_batch_size_value(self, mock_bulk_create): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = "a" + + resource = _BookResource() + with self.assertRaises(ValueError): + resource.import_data(self.dataset) + mock_bulk_create.assert_not_called() + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_negative_batch_size_value(self, mock_bulk_create): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = -1 + + resource = _BookResource() + with self.assertRaises(ValueError): + resource.import_data(self.dataset) + mock_bulk_create.assert_not_called() + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_oversized_batch_size_value(self, mock_bulk_create): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = 100 + + resource = _BookResource() + result = resource.import_data(self.dataset) + self.assertEqual(1, mock_bulk_create.call_count) + mock_bulk_create.assert_called_with(mock.ANY, batch_size=None) + 
self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["new"]) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_logs_exception(self, mock_bulk_create): + e = ValidationError("invalid field") + mock_bulk_create.side_effect = e + + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = 100 + + resource = _BookResource() + with mock.patch("logging.Logger.debug") as mock_exception: + resource.import_data(self.dataset) + mock_exception.assert_called_with(e, exc_info=e) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_raises_exception(self, mock_bulk_create): + mock_bulk_create.side_effect = ValidationError("invalid field") + + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = 100 + + resource = _BookResource() + with self.assertRaises(exceptions.ImportError): + resource.import_data(self.dataset, raise_errors=True) + + @mock.patch("core.models.Book.objects.bulk_create") + def test_bulk_create_exception_gathered_on_dry_run(self, mock_bulk_create): + mock_bulk_create.side_effect = ValidationError("invalid field") + + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = 100 + + resource = _BookResource() + result = resource.import_data(self.dataset, dry_run=True, raise_errors=False) + self.assertTrue(result.has_errors()) + + def test_m2m_not_called_for_bulk(self): + mock_m2m_widget = mock.Mock(spec=widgets.ManyToManyWidget) + + class BookM2MResource(resources.ModelResource): + categories = fields.Field(attribute="categories", widget=mock_m2m_widget) + + class Meta: + model = Book + use_bulk = True + + resource = BookM2MResource() + self.dataset.append_col(["Cat 1|Cat 2"] * 10, header="categories") + resource.import_data(self.dataset, raise_errors=True) + mock_m2m_widget.assert_not_called() + + def test_force_init_instance(self): + 
class _BookResource(resources.ModelResource): + def get_instance(self, instance_loader, row): + raise AssertionError("should not be called") + + class Meta: + model = Book + force_init_instance = True + + resource = _BookResource() + self.assertIsNotNone( + resource.get_or_init_instance( + ModelInstanceLoader(resource), self.dataset[0] + ) + ) + + @mock.patch("import_export.resources.atomic_if_using_transaction") + def test_no_sub_transaction_on_row_for_bulk(self, mock_atomic_if_using_transaction): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + + resource = _BookResource() + resource.import_data(self.dataset) + self.assertIn( + False, [x[0][0] for x in mock_atomic_if_using_transaction.call_args_list] + ) + + +class BulkUpdateTest(BulkTest): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + fields = ("id", "name") + import_id_fields = ("id",) + + def setUp(self): + super().setUp() + self.init_update_test_data() + self.resource = self._BookResource() + + def test_bulk_update(self): + result = self.resource.import_data(self.dataset) + [self.assertEqual("UPDATED", b.name) for b in Book.objects.all()] + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["update"]) + + @mock.patch("core.models.Book.objects.bulk_update") + def test_bulk_update_batch_size_of_4(self, mock_bulk_update): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = 4 + + resource = _BookResource() + result = resource.import_data(self.dataset) + self.assertEqual(3, mock_bulk_update.call_count) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["update"]) + + @mock.patch("core.models.Book.objects.bulk_update") + def test_bulk_update_batch_size_of_5(self, mock_bulk_update): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = 5 + + resource = 
_BookResource() + result = resource.import_data(self.dataset) + self.assertEqual(2, mock_bulk_update.call_count) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["update"]) + + @mock.patch("core.models.Book.objects.bulk_update") + def test_bulk_update_no_batch_size(self, mock_bulk_update): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + batch_size = None + + resource = _BookResource() + result = resource.import_data(self.dataset) + self.assertEqual(1, mock_bulk_update.call_count) + mock_bulk_update.assert_called_with(mock.ANY, mock.ANY, batch_size=None) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["update"]) + + @mock.patch("core.models.Book.objects.bulk_update") + def test_bulk_update_not_called_when_not_using_transactions(self, mock_bulk_update): + class _BookResource(resources.ModelResource): + def import_data( + self, + dataset, + dry_run=False, + raise_errors=False, + use_transactions=None, + collect_failed_rows=False, + **kwargs, + ): + # override so that we can enforce not using_transactions + using_transactions = False + return self.import_data_inner( + dataset, + dry_run, + raise_errors, + using_transactions, + collect_failed_rows, + **kwargs, + ) + + class Meta: + model = Book + use_bulk = True + + resource = _BookResource() + resource.import_data(self.dataset, dry_run=True) + mock_bulk_update.assert_not_called() + + @mock.patch("core.models.Book.objects.bulk_update") + def test_bulk_update_called_for_dry_run(self, mock_bulk_update): + self.resource.import_data(self.dataset, dry_run=True) + self.assertEqual(1, mock_bulk_update.call_count) + + @mock.patch("core.models.Book.objects.bulk_update") + def test_bulk_not_called_when_use_bulk_disabled(self, mock_bulk_update): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = False + + resource = _BookResource() + result = resource.import_data(self.dataset) + 
self.assertEqual(10, Book.objects.count()) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["update"]) + mock_bulk_update.assert_not_called() + + @mock.patch("core.models.Book.objects.bulk_update") + def test_bulk_update_logs_exception(self, mock_bulk_update): + e = ValidationError("invalid field") + mock_bulk_update.side_effect = e + + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + + resource = _BookResource() + with mock.patch("logging.Logger.debug") as mock_exception: + resource.import_data(self.dataset) + mock_exception.assert_called_with(e, exc_info=e) + + @mock.patch("core.models.Book.objects.bulk_update") + def test_bulk_update_raises_exception(self, mock_bulk_update): + e = ValidationError("invalid field") + mock_bulk_update.side_effect = e + + class _BookResource(resources.ModelResource): + class Meta: + model = Book + use_bulk = True + + resource = _BookResource() + with self.assertRaises(exceptions.ImportError) as raised_exc: + resource.import_data(self.dataset, raise_errors=True) + self.assertEqual(e, raised_exc) + + +class BulkUUIDBookUpdateTest(BulkTest): + def setUp(self): + super().setUp() + self.init_update_test_data(model=UUIDBook) + + @mock.patch("core.models.UUIDBook.objects.bulk_update") + def test_bulk_update_uuid_model(self, mock_bulk_update): + """Test update of a Model which defines uuid not pk (issue #1274)""" + + class _UUIDBookResource(resources.ModelResource): + class Meta: + model = UUIDBook + use_bulk = True + batch_size = 5 + fields = ( + "id", + "name", + ) + + resource = _UUIDBookResource() + result = resource.import_data(self.dataset) + self.assertEqual(2, mock_bulk_update.call_count) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["update"]) + + +class BulkDeleteTest(BulkTest): + class DeleteBookResource(resources.ModelResource): + def for_delete(self, row, instance): + return True + + class Meta: + model = Book + use_bulk 
= True + # there are errors when diffing with mocks + # therefore disable diff with this flag + skip_diff = True + + def setUp(self): + super().setUp() + self.resource = self.DeleteBookResource() + self.resource._meta.batch_size = 1000 + self.resource._meta.use_bulk = True + self.init_update_test_data() + + @mock.patch("core.models.Book.delete") + def test_bulk_delete_use_bulk_is_false(self, mock_obj_delete): + self.resource._meta.use_bulk = False + self.resource.import_data(self.dataset) + self.assertEqual(10, mock_obj_delete.call_count) + + @mock.patch("core.models.Book.objects") + def test_bulk_delete_batch_size_of_4(self, mock_obj_manager): + self.resource._meta.batch_size = 4 + result = self.resource.import_data(self.dataset) + self.assertEqual(3, mock_obj_manager.filter.return_value.delete.call_count) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["delete"]) + + @mock.patch("core.models.Book.objects") + def test_bulk_delete_batch_size_of_5(self, mock_obj_manager): + self.resource._meta.batch_size = 5 + result = self.resource.import_data(self.dataset) + self.assertEqual(2, mock_obj_manager.filter.return_value.delete.call_count) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["delete"]) + + @mock.patch("core.models.Book.objects") + def test_bulk_delete_batch_size_is_none(self, mock_obj_manager): + self.resource._meta.batch_size = None + result = self.resource.import_data(self.dataset) + self.assertEqual(1, mock_obj_manager.filter.return_value.delete.call_count) + self.assertEqual(10, result.total_rows) + self.assertEqual(10, result.totals["delete"]) + + @mock.patch("core.models.Book.objects") + def test_bulk_delete_not_called_when_not_using_transactions(self, mock_obj_manager): + class _BookResource(self.DeleteBookResource): + def import_data( + self, + dataset, + dry_run=False, + raise_errors=False, + use_transactions=None, + collect_failed_rows=False, + **kwargs, + ): + # override so that we can 
enforce not using_transactions + using_transactions = False + return self.import_data_inner( + dataset, + dry_run, + raise_errors, + using_transactions, + collect_failed_rows, + **kwargs, + ) + + resource = _BookResource() + resource.import_data(self.dataset, dry_run=True) + self.assertEqual(0, mock_obj_manager.filter.return_value.delete.call_count) + + @mock.patch("core.models.Book.objects") + def test_bulk_delete_called_for_dry_run(self, mock_obj_manager): + self.resource.import_data(self.dataset, dry_run=True) + self.assertEqual(1, mock_obj_manager.filter.return_value.delete.call_count) + + @mock.patch("core.models.Book.objects") + def test_bulk_delete_logs_exception(self, mock_obj_manager): + e = Exception("invalid") + mock_obj_manager.filter.return_value.delete.side_effect = e + + with mock.patch("logging.Logger.debug") as mock_exception: + self.resource.import_data(self.dataset) + mock_exception.assert_called_with(e, exc_info=mock.ANY) + self.assertEqual(1, mock_exception.call_count) + + @mock.patch("core.models.Book.objects") + def test_bulk_delete_raises_exception(self, mock_obj_manager): + e = Exception("invalid") + mock_obj_manager.filter.return_value.delete.side_effect = e + + with self.assertRaises(Exception) as raised_exc: + self.resource.import_data(self.dataset, raise_errors=True) + self.assertEqual(e, raised_exc) + + +class BulkUUIDBookDeleteTest(BulkTest): + class DeleteBookResource(resources.ModelResource): + def for_delete(self, row, instance): + return True + + class Meta: + model = UUIDBook + use_bulk = True + batch_size = 5 + + def setUp(self): + super().setUp() + self.resource = self.DeleteBookResource() + self.init_update_test_data(model=UUIDBook) + + def test_bulk_delete_batch_size_of_5(self): + self.assertEqual(10, UUIDBook.objects.count()) + self.resource.import_data(self.dataset) + self.assertEqual(0, UUIDBook.objects.count()) diff --git a/tests/core/tests/test_resources/test_diffs.py b/tests/core/tests/test_resources/test_diffs.py new 
file mode 100644 index 000000000..2bcf7627e --- /dev/null +++ b/tests/core/tests/test_resources/test_diffs.py @@ -0,0 +1,118 @@ +from unittest import mock + +import tablib +from core.models import Book +from django.test import TestCase + +from import_export import resources + + +@mock.patch("import_export.resources.Diff", spec=True) +class SkipDiffTest(TestCase): + """ + Tests that the meta attribute 'skip_diff' means that no diff operations are called. + 'copy.deepcopy' cannot be patched at class level because it causes interferes with + ``resources.Resource.__init__()``. + """ + + def setUp(self): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + skip_diff = True + + self.resource = _BookResource() + self.dataset = tablib.Dataset(headers=["id", "name", "birthday"]) + self.dataset.append(["", "A.A.Milne", "1882test-01-18"]) + + def test_skip_diff(self, mock_diff): + with mock.patch("import_export.resources.deepcopy") as mock_deep_copy: + self.resource.import_data(self.dataset) + mock_diff.return_value.compare_with.assert_not_called() + mock_diff.return_value.as_html.assert_not_called() + mock_deep_copy.assert_not_called() + + def test_skip_diff_for_delete_new_resource(self, mock_diff): + class BookResource(resources.ModelResource): + class Meta: + model = Book + skip_diff = True + + def for_delete(self, row, instance): + return True + + resource = BookResource() + with mock.patch("import_export.resources.deepcopy") as mock_deep_copy: + resource.import_data(self.dataset) + mock_diff.return_value.compare_with.assert_not_called() + mock_diff.return_value.as_html.assert_not_called() + mock_deep_copy.assert_not_called() + + def test_skip_diff_for_delete_existing_resource(self, mock_diff): + book = Book.objects.create() + + class BookResource(resources.ModelResource): + class Meta: + model = Book + skip_diff = True + + def get_or_init_instance(self, instance_loader, row): + return book, False + + def for_delete(self, row, instance): + return 
True + + resource = BookResource() + + with mock.patch("import_export.resources.deepcopy") as mock_deep_copy: + resource.import_data(self.dataset, dry_run=True) + mock_diff.return_value.compare_with.assert_not_called() + mock_diff.return_value.as_html.assert_not_called() + mock_deep_copy.assert_not_called() + + def test_skip_diff_for_delete_skip_row_not_enabled_new_object(self, mock_diff): + class BookResource(resources.ModelResource): + class Meta: + model = Book + skip_diff = False + + def for_delete(self, row, instance): + return True + + resource = BookResource() + + with mock.patch("import_export.resources.deepcopy") as mock_deep_copy: + resource.import_data(self.dataset, dry_run=True) + self.assertEqual(1, mock_diff.return_value.compare_with.call_count) + self.assertEqual(1, mock_deep_copy.call_count) + + def test_skip_row_returns_false_when_skip_diff_is_true(self, mock_diff): + class BookResource(resources.ModelResource): + class Meta: + model = Book + skip_unchanged = True + skip_diff = True + + resource = BookResource() + + with mock.patch( + "import_export.resources.Resource.get_import_fields" + ) as mock_get_import_fields: + resource.import_data(self.dataset, dry_run=True) + self.assertEqual(3, mock_get_import_fields.call_count) + + +class SkipHtmlDiffTest(TestCase): + def test_skip_html_diff(self): + class BookResource(resources.ModelResource): + class Meta: + model = Book + skip_html_diff = True + + resource = BookResource() + self.dataset = tablib.Dataset(headers=["id", "name", "birthday"]) + self.dataset.append(["", "A.A.Milne", "1882test-01-18"]) + + with mock.patch("import_export.resources.Diff.as_html") as mock_as_html: + resource.import_data(self.dataset, dry_run=True) + mock_as_html.assert_not_called() diff --git a/tests/core/tests/test_resources/test_import_export.py b/tests/core/tests/test_resources/test_import_export.py new file mode 100644 index 000000000..82eb24efa --- /dev/null +++ b/tests/core/tests/test_resources/test_import_export.py @@ 
-0,0 +1,569 @@ +from datetime import date +from unittest.mock import patch + +import tablib +from core.admin import UUIDBookResource +from core.models import Author, Book, Category, EBook, NamedAuthor, UUIDBook +from core.tests.resources import AuthorResource, BookResource +from django.test import TestCase + +from import_export import exceptions, fields, resources, widgets +from import_export.fields import Field +from import_export.resources import ModelResource + + +class AfterImportComparisonTest(TestCase): + class BookResource(resources.ModelResource): + is_published = False + + def after_import_row(self, row, row_result, **kwargs): + if ( + getattr(row_result.original, "published") is None + and getattr(row_result.instance, "published") is not None + ): + self.is_published = True + + class Meta: + model = Book + store_instance = True + + def setUp(self): + super().setUp() + self.resource = AfterImportComparisonTest.BookResource() + self.book = Book.objects.create(name="Some book") + self.dataset = tablib.Dataset(headers=["id", "name", "published"]) + row = [self.book.pk, "Some book", "2023-05-09"] + self.dataset.append(row) + + def test_after_import_row_check_for_change(self): + # issue 1583 - assert that `original` object is available to after_import_row() + self.resource.import_data(self.dataset, raise_errors=True) + self.assertTrue(self.resource.is_published) + + +class ImportExportFieldOrderTest(TestCase): + class BaseBookResource(resources.ModelResource): + def __init__(self): + self.field_names = [] + + def get_queryset(self): + return Book.objects.all().order_by("id") + + def import_field(self, field, obj, data, is_m2m=False, **kwargs): + # mock out import_field() so that we can see the order + # fields were called + self.field_names.append(field.column_name) + + class UnorderedBookResource(BaseBookResource): + class Meta: + fields = ("price", "id", "name") + model = Book + + class OrderedBookResource(BaseBookResource): + class Meta: + fields = ("price", 
"id", "name") + import_order = ["price", "name", "id"] + export_order = ("price", "name", "id") + model = Book + + class SubsetOrderedBookResource(BaseBookResource): + class Meta: + fields = ("price", "id", "name", "published") + import_order = ("name",) + export_order = ("published",) + model = Book + + class DuplicateFieldsBookResource(BaseBookResource): + class Meta: + fields = ("id", "price", "name", "price") + model = Book + + class FieldsAsListBookResource(BaseBookResource): + class Meta: + fields = ["id", "price", "name"] + model = Book + + class MixedIterableBookResource(BaseBookResource): + class Meta: + fields = ("price", "id", "name") + import_order = ["price", "name", "id"] + model = Book + + class DeclaredModelFieldBookResource(BaseBookResource): + # Non-model field, should come after model fields by default + author_full_name = fields.Field( + attribute="author", + column_name="author full name", + ) + + # Order of declared fields in `ModelResource` shouldn't change export order + categories = fields.Field( + attribute="categories", + column_name="categories", + widget=widgets.ManyToManyWidget(model=Category, field="name"), + ) + published = fields.Field( + attribute="published", + column_name="published", + widget=widgets.DateWidget("%d.%m.%Y"), + ) + author = fields.Field(attribute="author__name", column_name="author") + + class Meta: + model = Book + + def dehydrate_author_full_name(self, obj): + if obj.author: + return f"{obj.author.name} Bar" + + return "" + + def setUp(self): + super().setUp() + self.pk = Book.objects.create(name="Ulysses", price="1.99").pk + self.dataset = tablib.Dataset(headers=["id", "name", "price"]) + row = [self.pk, "Some book", "19.99"] + self.dataset.append(row) + + def test_mixed_iterable(self): + # 1878 + self.resource = ImportExportFieldOrderTest.MixedIterableBookResource() + self.resource.import_data(self.dataset) + self.assertEqual(["price", "name", "id"], self.resource.field_names) + + def 
test_defined_import_order(self): + self.resource = ImportExportFieldOrderTest.OrderedBookResource() + self.resource.import_data(self.dataset) + self.assertEqual(["price", "name", "id"], self.resource.field_names) + + def test_undefined_import_order(self): + self.resource = ImportExportFieldOrderTest.UnorderedBookResource() + self.resource.import_data(self.dataset) + self.assertEqual(["price", "id", "name"], self.resource.field_names) + + def test_defined_export_order(self): + self.resource = ImportExportFieldOrderTest.OrderedBookResource() + data = self.resource.export() + target = f"price,name,id\r\n1.99,Ulysses,{self.pk}\r\n" + self.assertEqual(target, data.csv) + + def test_undefined_export_order(self): + # When export order is not defined, + # exported order should correspond with 'fields' definition + self.resource = ImportExportFieldOrderTest.UnorderedBookResource() + data = self.resource.export() + target = f"price,id,name\r\n1.99,{self.pk},Ulysses\r\n" + self.assertEqual(target, data.csv) + + def test_subset_import_order(self): + self.resource = ImportExportFieldOrderTest.SubsetOrderedBookResource() + self.resource.import_data(self.dataset) + self.assertEqual( + ["name", "price", "id", "published"], self.resource.field_names + ) + + def test_subset_export_order(self): + self.resource = ImportExportFieldOrderTest.SubsetOrderedBookResource() + data = self.resource.export() + target = f"published,price,id,name\r\n,1.99,{self.pk},Ulysses\r\n" + self.assertEqual(target, data.csv) + + def test_duplicate_import_order(self): + self.resource = ImportExportFieldOrderTest.DuplicateFieldsBookResource() + self.resource.import_data(self.dataset) + self.assertEqual(["id", "price", "name"], self.resource.field_names) + + def test_duplicate_export_order(self): + self.resource = ImportExportFieldOrderTest.DuplicateFieldsBookResource() + data = self.resource.export() + target = f"id,price,name\r\n{self.pk},1.99,Ulysses\r\n" + self.assertEqual(target, data.csv) + + def 
test_fields_as_list_import_order(self): + self.resource = ImportExportFieldOrderTest.FieldsAsListBookResource() + self.resource.import_data(self.dataset) + self.assertEqual(["id", "price", "name"], self.resource.field_names) + + def test_fields_as_list_export_order(self): + self.resource = ImportExportFieldOrderTest.FieldsAsListBookResource() + data = self.resource.export() + target = f"id,price,name\r\n{self.pk},1.99,Ulysses\r\n" + self.assertEqual(target, data.csv) + + def test_declared_model_fields_not_alter_export_order(self): + # Issue (#1663) + + categories = [ + Category.objects.create(name="sci-fi"), + Category.objects.create(name="romance"), + ] + author = Author.objects.create(name="Foo") + book = Book.objects.create( + name="The Lord Of The Rings", author=author, published=date(2022, 2, 2) + ) + book.categories.set(categories) + + self.resource = ImportExportFieldOrderTest.DeclaredModelFieldBookResource() + declared_field_names = ( + "published", + "author", # FK + "categories", # M2M + ) + export_order = self.resource.get_export_order() + model_fields_names = [ + field.name for field in self.resource._meta.model._meta.get_fields() + ] + + for declared_field_name in declared_field_names: + self.assertEqual( + model_fields_names.index(declared_field_name), + export_order.index(declared_field_name), + ) + + # Validate non-model field is exported last unless specified + self.assertEqual(export_order[-1], "author_full_name") + + def test_meta_fields_not_alter_export_order(self): + class DeclaredModelFieldBookResource( + ImportExportFieldOrderTest.BaseBookResource + ): + # Non-model field, should come after model fields by default + author_full_name = fields.Field( + attribute="author", + column_name="author full name", + ) + + # Order of declared fields in `ModelResource` shouldn't change export order + categories = fields.Field( + attribute="categories", + column_name="categories", + widget=widgets.ManyToManyWidget(model=Category, field="name"), + ) + 
published = fields.Field( + attribute="published", + column_name="published", + widget=widgets.DateWidget("%d.%m.%Y"), + ) + author = fields.Field(attribute="author__name", column_name="author") + + class Meta: + model = Book + fields = ( + "id", + "author__name", + "author", + "author_full_name", + "categories", + "published", + ) + + def dehydrate_author_full_name(self, obj): + if obj.author: + return f"{obj.author.name} Bar" + + return "" + + self.resource = DeclaredModelFieldBookResource() + self.assertEqual(self.resource.get_export_order(), self.resource._meta.fields) + + def test_declared_field_export_order(self): + # issue 1848 + class DeclaredModelFieldBookResource( + ImportExportFieldOrderTest.BaseBookResource + ): + published = fields.Field( + attribute="published", + column_name="date published", + widget=widgets.DateWidget("%d.%m.%Y"), + ) + + class Meta: + model = Book + fields = ( + "id", + "author", + "published", + ) + export_order = ( + "published", + "id", + "author", + ) + + self.resource = DeclaredModelFieldBookResource() + data = self.resource.export() + target = f"date published,id,author\r\n,{self.pk},\r\n" + self.assertEqual(target, data.csv) + + def test_export_fields_column_name(self): + """Test export with declared export_fields and custom column_name""" + + # issue 1846 + class DeclaredModelFieldBookResource(resources.ModelResource): + published = fields.Field( + attribute="published", + column_name="datePublished", + widget=widgets.DateWidget("%d.%m.%Y"), + ) + author = fields.Field(column_name="AuthorFooName") + + class Meta: + model = Book + fields = ( + "id", + "author", + "published", + ) + export_order = ( + "published", + "id", + "author", + ) + + def dehydrate_author(self, obj): + return obj.author + + self.resource = DeclaredModelFieldBookResource() + data = self.resource.export() + target = f"datePublished,id,AuthorFooName\r\n,{self.pk},\r\n" + self.assertEqual(target, data.csv) + + +class ImportIdFieldsTestCase(TestCase): + 
class BookResource(resources.ModelResource): + name = fields.Field(attribute="name", column_name="book_name") + + class Meta: + model = Book + import_id_fields = ["name"] + + def setUp(self): + super().setUp() + self.book = Book.objects.create(name="The Hobbit") + self.resource = ImportIdFieldsTestCase.BookResource() + + def test_custom_column_name_warns_if_not_present(self): + dataset = tablib.Dataset( + *[(self.book.pk, "Some book")], headers=["id", "wrong_name"] + ) + with self.assertRaises(exceptions.ImportError) as e: + self.resource.import_data(dataset, raise_errors=True) + self.assertEqual( + "The following fields are declared in 'import_id_fields' " + "but are not present in the file headers: book_name", + str(e.exception), + ) + + def test_custom_column_name_warns_if_not_present_as_error_in_result(self): + dataset = tablib.Dataset( + *[(self.book.pk, "Some book")], headers=["id", "wrong_name"] + ) + res = self.resource.import_data(dataset, raise_errors=False) + target = ( + "The following fields are declared in 'import_id_fields' " + "but are not present in the file headers: book_name" + ) + self.assertEqual(target, str(res.base_errors[0].error)) + + def test_missing_import_id_field_raises_exception(self): + class TestBookResource(resources.ModelResource): + class Meta: + model = Book + import_id_fields = ("id", "a", "b") + + resource = TestBookResource() + + book = Book.objects.create(name="Some book") + row = [book.pk, "Some book"] + dataset = tablib.Dataset(*[row], headers=["id", "name"]) + dataset.append(row) + + with self.assertRaises(exceptions.ImportError) as e: + resource.import_data(dataset, raise_errors=True) + self.assertEqual( + "The following fields are declared in 'import_id_fields' " + "but are not present in the resource fields: a, b", + str(e.exception), + ) + + def test_multiple_import_id_fields(self): + class BookResource(resources.ModelResource): + class Meta: + model = Book + import_id_fields = ("id", "name", "author_email") + + 
self.resource = BookResource() + dataset = tablib.Dataset( + *[(self.book.pk, "Goldeneye", "ian.fleming@example.com")], + headers=["A", "name", "B"], + ) + with self.assertRaises(exceptions.ImportError) as e: + self.resource.import_data(dataset, raise_errors=True) + self.assertEqual( + "The following fields are declared in 'import_id_fields' " + "but are not present in the file headers: id, author_email", + str(e.exception), + ) + + def test_dynamic_import_id_fields(self): + # issue 1834 + class BookResource(resources.ModelResource): + def before_import(self, dataset, **kwargs): + # mimic a 'dynamic field' - i.e. append field which exists on + # Book model, but not in dataset + dataset.headers.append("price") + super().before_import(dataset, **kwargs) + + class Meta: + model = Book + import_id_fields = ("price",) + + self.resource = BookResource() + dataset = tablib.Dataset( + *[(self.book.pk, "Goldeneye", "ian.fleming@example.com")], + headers=["id", "name", "author_email"], + ) + self.resource.import_data(dataset, raise_errors=True) + self.assertEqual("Goldeneye", Book.objects.latest("id").name) + + +class ImportWithMissingFields(TestCase): + # issue 1517 + @patch("import_export.resources.logger") + @patch("import_export.fields.Field.save") + def test_import_with_missing_field_in_row(self, mock_field_save, mock_logger): + dataset = tablib.Dataset(*[(1, "Some book")], headers=["id", "name"]) + self.resource = BookResource() + result = self.resource.import_data(dataset) + self.assertFalse(result.has_errors()) + mock_logger.debug.assert_any_call( + "skipping field '' " + "- column name 'author_email' is not present in row" + ) + self.assertEqual(2, mock_field_save.call_count) + + def test_import_row_with_no_defined_id_field(self): + """Ensure a row with no id field can be imported (issue 1812).""" + self.assertEqual(0, Author.objects.count()) + dataset = tablib.Dataset(*[("J. R. R. 
Tolkien",)], headers=["name"]) + self.resource = AuthorResource() + self.resource.import_data(dataset) + self.assertEqual(1, Author.objects.count()) + + +class CustomColumnNameImportTest(TestCase): + """ + If a custom field is declared, import should work if either the Field's + attribute name or column name is referenced in the ``fields`` list (issue 1815). + """ + + fixtures = ["author"] + + class _EBookResource(ModelResource): + published = Field(attribute="published", column_name="published_date") + + class Meta: + model = EBook + fields = ("id", "name", "published_date") + + def setUp(self): + super().setUp() + self.resource = CustomColumnNameImportTest._EBookResource() + + def test_import_with_column_alias_in_fields_list(self): + self.assertEqual(0, EBook.objects.count()) + dataset = tablib.Dataset( + *[(1, "Moonraker", "1955-04-05")], headers=["id", "name", "published_date"] + ) + self.resource.import_data(dataset, raise_errors=True) + self.assertEqual(1, EBook.objects.count()) + self.assertEqual(date(1955, 4, 5), EBook.objects.first().published) + + +class CustomPrimaryKeyRelationImportTest(TestCase): + """ + Test issue 1852. + Ensure import works when a relation has a custom primary key. + """ + + def setUp(self): + super().setUp() + # The name for this object is the PK + self.named_author = NamedAuthor.objects.create(name="Ian Fleming") + self.resource = UUIDBookResource() + + def test_custom_column_name_warns_if_not_present(self): + dataset = tablib.Dataset( + *[("Moonraker", "Ian Fleming")], headers=["name", "author"] + ) + self.assertEqual(0, UUIDBook.objects.count()) + self.resource.import_data(dataset, raise_errors=True) + self.assertEqual(1, UUIDBook.objects.count()) + + +class DeclaredFieldWithNoAttributeTestCase(TestCase): + """ + If a custom field is declared, import should skip setting an attribute if the + Field declaration has no attribute name. 
+ # 1874 + """ + + class _EBookResource(ModelResource): + published = Field(column_name="published") + + class Meta: + model = EBook + fields = ("id", "name", "published") + + def setUp(self): + super().setUp() + self.resource = DeclaredFieldWithNoAttributeTestCase._EBookResource() + + @patch("import_export.resources.logger") + def test_import_with_no_attribute(self, mock_logger): + self.assertEqual(0, EBook.objects.count()) + dataset = tablib.Dataset( + *[(1, "Moonraker", "1955-04-05")], headers=["id", "name", "published"] + ) + self.resource.import_data(dataset, raise_errors=True) + self.assertEqual(1, EBook.objects.count()) + self.assertIsNone(EBook.objects.first().published) + mock_logger.debug.assert_any_call( + "skipping field '' " + "- field attribute is not defined" + ) + + +class QuerysetValuesOnExportTest(TestCase): + """ + Issue 2020 - export should handle QuerySet.values() + """ + + class _EBookResource(ModelResource): + + def get_queryset(self): + return EBook.objects.all().values("id", "name", "published") + + class Meta: + model = EBook + fields = ("id", "name", "published") + + def setUp(self): + super().setUp() + self.resource = QuerysetValuesOnExportTest._EBookResource() + EBook.objects.create(id=101, name="Moonraker", published=date(1955, 4, 5)) + + def test_export(self): + res = self.resource.export() + self.assertEqual(1, len(res.dict)) + self.assertDictEqual( + {"id": "101", "name": "Moonraker", "published": "1955-04-05"}, + res.dict.pop(), + ) + + def test_get_value_returns_none_when_attribute_missing(self): + instance = {"some_other_key": "value"} + field = Field(attribute="missing_attribute") + + result = field.get_value(instance) + self.assertIsNone(result) diff --git a/tests/core/tests/test_resources/test_misc.py b/tests/core/tests/test_resources/test_misc.py new file mode 100644 index 000000000..abfe61066 --- /dev/null +++ b/tests/core/tests/test_resources/test_misc.py @@ -0,0 +1,51 @@ +import tablib +from core.models import Author, 
Book, Category +from django.test import TestCase + +from import_export import resources, results + + +class RawValueTest(TestCase): + def setUp(self): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + store_row_values = True + + self.resource = _BookResource() + + self.book = Book.objects.create(name="Some book") + self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"]) + row = [self.book.pk, "Some book", "test@example.com", "10.25"] + self.dataset.append(row) + + def test_import_data(self): + result = self.resource.import_data(self.dataset, raise_errors=True) + + self.assertFalse(result.has_errors()) + self.assertEqual(len(result.rows), 1) + self.assertTrue(result.rows[0].diff) + self.assertEqual( + result.rows[0].import_type, results.RowResult.IMPORT_TYPE_UPDATE + ) + self.assertEqual(result.rows[0].row_values.get("name"), "Some book") + self.assertEqual( + result.rows[0].row_values.get("author_email"), "test@example.com" + ) + self.assertEqual(result.rows[0].row_values.get("price"), "10.25") + + +class ResourcesHelperFunctionsTest(TestCase): + """ + Test the helper functions in resources. 
+ """ + + def test_has_natural_foreign_key(self): + """ + Ensure that resources.has_natural_foreign_key detects correctly + whether a model has a natural foreign key + """ + cases = {Book: True, Author: True, Category: False} + + for model, expected_result in cases.items(): + self.assertEqual(resources.has_natural_foreign_key(model), expected_result) diff --git a/tests/core/tests/test_resources/test_modelresource/__init__.py b/tests/core/tests/test_resources/test_modelresource/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/tests/test_resources/test_modelresource/test_data_deletion.py b/tests/core/tests/test_resources/test_modelresource/test_data_deletion.py new file mode 100644 index 000000000..bbaa654e9 --- /dev/null +++ b/tests/core/tests/test_resources/test_modelresource/test_data_deletion.py @@ -0,0 +1,150 @@ +from unittest import mock + +import tablib +from core.models import Book +from core.tests.resources import BookResource +from django.test import TestCase + +from import_export import fields, results, widgets + + +class DataDeletionDryRunTest(TestCase): + def setUp(self): + self.resource = BookResource() + self.book = Book.objects.create(name="Some book") + self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"]) + row = [self.book.pk, "Some book", "test@example.com", "10.25"] + self.dataset.append(row) + + def test_import_data_delete(self): + class B(BookResource): + delete = fields.Field(widget=widgets.BooleanWidget()) + + def for_delete(self, row, instance): + return self.fields["delete"].clean(row) + + row = [self.book.pk, self.book.name, "1"] + dataset = tablib.Dataset(*[row], headers=["id", "name", "delete"]) + result = B().import_data(dataset, raise_errors=True) + self.assertFalse(result.has_errors()) + self.assertEqual( + result.rows[0].import_type, results.RowResult.IMPORT_TYPE_DELETE + ) + self.assertFalse(Book.objects.filter(pk=self.book.pk)) + self.assertIsNone(result.rows[0].instance) 
+ self.assertIsNone(result.rows[0].original) + + def test_import_data_delete_store_instance(self): + class B(BookResource): + delete = fields.Field(widget=widgets.BooleanWidget()) + + def for_delete(self, row, instance): + return self.fields["delete"].clean(row) + + class Meta: + store_instance = True + + row = [self.book.pk, self.book.name, "1"] + dataset = tablib.Dataset(*[row], headers=["id", "name", "delete"]) + result = B().import_data(dataset, raise_errors=True) + self.assertEqual( + result.rows[0].import_type, results.RowResult.IMPORT_TYPE_DELETE + ) + self.assertIsNotNone(result.rows[0].instance) + + def test_save_instance_with_dry_run_flag(self): + class B(BookResource): + def before_save_instance(self, instance, row, **kwargs): + super().before_save_instance(instance, row, **kwargs) + dry_run = kwargs.get("dry_run", False) + if dry_run: + self.before_save_instance_dry_run = True + else: + self.before_save_instance_dry_run = False + + def save_instance(self, instance, new, row, **kwargs): + super().save_instance(instance, new, row, **kwargs) + dry_run = kwargs.get("dry_run", False) + if dry_run: + self.save_instance_dry_run = True + else: + self.save_instance_dry_run = False + + def after_save_instance(self, instance, row, **kwargs): + super().after_save_instance(instance, row, **kwargs) + dry_run = kwargs.get("dry_run", False) + if dry_run: + self.after_save_instance_dry_run = True + else: + self.after_save_instance_dry_run = False + + resource = B() + resource.import_data(self.dataset, dry_run=True, raise_errors=True) + self.assertTrue(resource.before_save_instance_dry_run) + self.assertTrue(resource.save_instance_dry_run) + self.assertTrue(resource.after_save_instance_dry_run) + + resource.import_data(self.dataset, dry_run=False, raise_errors=True) + self.assertFalse(resource.before_save_instance_dry_run) + self.assertFalse(resource.save_instance_dry_run) + self.assertFalse(resource.after_save_instance_dry_run) + + @mock.patch("core.models.Book.save") + 
def test_save_instance_noop(self, mock_book): + book = Book.objects.first() + self.resource.save_instance( + book, False, None, using_transactions=False, dry_run=True + ) + self.assertEqual(0, mock_book.call_count) + + @mock.patch("core.models.Book.save") + def test_delete_instance_noop(self, mock_book): + book = Book.objects.first() + self.resource.delete_instance( + book, None, using_transactions=False, dry_run=True + ) + self.assertEqual(0, mock_book.call_count) + + def test_delete_instance_with_dry_run_flag(self): + class B(BookResource): + delete = fields.Field(widget=widgets.BooleanWidget()) + + def for_delete(self, row, instance): + return self.fields["delete"].clean(row) + + def before_delete_instance(self, instance, row, **kwargs): + super().before_delete_instance(instance, row, **kwargs) + dry_run = kwargs.get("dry_run", False) + if dry_run: + self.before_delete_instance_dry_run = True + else: + self.before_delete_instance_dry_run = False + + def delete_instance(self, instance, row, **kwargs): + super().delete_instance(instance, row, **kwargs) + dry_run = kwargs.get("dry_run", False) + if dry_run: + self.delete_instance_dry_run = True + else: + self.delete_instance_dry_run = False + + def after_delete_instance(self, instance, row, **kwargs): + super().after_delete_instance(instance, row, **kwargs) + dry_run = kwargs.get("dry_run", False) + if dry_run: + self.after_delete_instance_dry_run = True + else: + self.after_delete_instance_dry_run = False + + resource = B() + row = [self.book.pk, self.book.name, "1"] + dataset = tablib.Dataset(*[row], headers=["id", "name", "delete"]) + resource.import_data(dataset, dry_run=True, raise_errors=True) + self.assertTrue(resource.before_delete_instance_dry_run) + self.assertTrue(resource.delete_instance_dry_run) + self.assertTrue(resource.after_delete_instance_dry_run) + + resource.import_data(dataset, dry_run=False, raise_errors=True) + self.assertFalse(resource.before_delete_instance_dry_run) + 
self.assertFalse(resource.delete_instance_dry_run) + self.assertFalse(resource.after_delete_instance_dry_run) diff --git a/tests/core/tests/test_resources/test_modelresource/test_data_handling.py b/tests/core/tests/test_resources/test_modelresource/test_data_handling.py new file mode 100644 index 000000000..3f5f1aa37 --- /dev/null +++ b/tests/core/tests/test_resources/test_modelresource/test_data_handling.py @@ -0,0 +1,118 @@ +from decimal import InvalidOperation + +import tablib +from core.models import Author, Book +from core.tests.resources import AuthorResourceWithCustomWidget, BookResource +from django.test import TestCase + +from import_export import resources, results + + +class DataHandlingTest(TestCase): + def setUp(self): + self.resource = BookResource() + self.book = Book.objects.create(name="Some book") + self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"]) + row = [self.book.pk, "Some book", "test@example.com", "10.25"] + self.dataset.append(row) + + def test_import_data_handles_widget_valueerrors_with_unicode_messages(self): + resource = AuthorResourceWithCustomWidget() + dataset = tablib.Dataset(headers=["id", "name", "birthday"]) + dataset.append(["", "A.A.Milne", "1882-01-18"]) + + result = resource.import_data(dataset, raise_errors=False) + + self.assertTrue(result.has_validation_errors()) + self.assertIs(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_INVALID) + self.assertEqual( + result.invalid_rows[0].field_specific_errors["name"], + ["Ова вриједност је страшна!"], + ) + + def test_model_validation_errors_not_raised_when_clean_model_instances_is_false( + self, + ): + class TestResource(resources.ModelResource): + class Meta: + model = Author + clean_model_instances = False + + resource = TestResource() + dataset = tablib.Dataset(headers=["id", "name"]) + dataset.append(["", "123"]) + + result = resource.import_data(dataset, raise_errors=False) + self.assertFalse(result.has_validation_errors()) + 
self.assertEqual(len(result.invalid_rows), 0) + + def test_model_validation_errors_raised_when_clean_model_instances_is_true(self): + class TestResource(resources.ModelResource): + class Meta: + model = Author + clean_model_instances = True + export_order = ["id", "name", "birthday"] + + # create test dataset + # NOTE: column order is deliberately strange + dataset = tablib.Dataset(headers=["name", "id"]) + dataset.append(["123", "1"]) + + # run import_data() + resource = TestResource() + result = resource.import_data(dataset, raise_errors=False) + + # check has_validation_errors() + self.assertTrue(result.has_validation_errors()) + + # check the invalid row itself + invalid_row = result.invalid_rows[0] + self.assertEqual(invalid_row.error_count, 1) + self.assertEqual( + invalid_row.field_specific_errors, {"name": ["'123' is not a valid value"]} + ) + # diff_header and invalid_row.values should match too + self.assertEqual(result.diff_headers, ["id", "name", "birthday"]) + self.assertEqual(invalid_row.values, ("1", "123", "---")) + + def test_known_invalid_fields_are_excluded_from_model_instance_cleaning(self): + # The custom widget on the parent class should complain about + # 'name' first, preventing Author.full_clean() from raising the + # error as it does in the previous test + + class TestResource(AuthorResourceWithCustomWidget): + class Meta: + model = Author + clean_model_instances = True + + resource = TestResource() + dataset = tablib.Dataset(headers=["id", "name"]) + dataset.append(["", "123"]) + + result = resource.import_data(dataset, raise_errors=False) + self.assertTrue(result.has_validation_errors()) + self.assertEqual(result.invalid_rows[0].error_count, 1) + self.assertEqual( + result.invalid_rows[0].field_specific_errors, + {"name": ["Ова вриједност је страшна!"]}, + ) + + def test_import_data_error_saving_model(self): + row = list(self.dataset.pop()) + # set pk to something that would yield error + row[0] = "foo" + self.dataset.append(row) + 
result = self.resource.import_data(self.dataset, raise_errors=False) + + self.assertTrue(result.has_errors()) + self.assertTrue(result.rows[0].errors) + actual = result.rows[0].errors[0].error + self.assertIsInstance(actual, (ValueError, InvalidOperation)) + self.assertIn( + str(actual), + { + "could not convert string to float", + "[]", + "Invalid literal for Decimal: 'foo'", + }, + ) diff --git a/tests/core/tests/test_resources/test_modelresource/test_data_import.py b/tests/core/tests/test_resources/test_modelresource/test_data_import.py new file mode 100644 index 000000000..6fccb076b --- /dev/null +++ b/tests/core/tests/test_resources/test_modelresource/test_data_import.py @@ -0,0 +1,139 @@ +from decimal import Decimal +from unittest import mock + +import tablib +from core.models import Book +from core.tests.resources import BookResource, BookResourceWithStoreInstance +from django.test import TestCase, skipUnlessDBFeature + +from import_export import results +from import_export.resources import Diff + + +class DataImportTests(TestCase): + def setUp(self): + self.resource = BookResource() + self.book = Book.objects.create(name="Some book") + self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"]) + row = [self.book.pk, "Some book", "test@example.com", "10.25"] + self.dataset.append(row) + + def test_get_diff(self): + diff = Diff(self.resource, self.book, False) + book2 = Book(name="Some other book") + diff.compare_with(self.resource, book2) + html = diff.as_html() + headers = self.resource.get_export_headers() + self.assertEqual( + html[headers.index("name")], + 'Some ' + "other book", + ) + self.assertFalse(html[headers.index("author_email")]) + + def test_import_data_update(self): + result = self.resource.import_data(self.dataset, raise_errors=True) + + self.assertFalse(result.has_errors()) + self.assertEqual(len(result.rows), 1) + self.assertTrue(result.rows[0].diff) + self.assertEqual( + result.rows[0].import_type, 
results.RowResult.IMPORT_TYPE_UPDATE + ) + self.assertEqual(result.rows[0].row_values.get("name"), None) + self.assertEqual(result.rows[0].row_values.get("author_email"), None) + + self.assertIsNone(result.rows[0].instance) + self.assertIsNotNone(result.rows[0].original) + + instance = Book.objects.get(pk=self.book.pk) + self.assertEqual(instance.author_email, "test@example.com") + self.assertEqual(instance.price, Decimal("10.25")) + + def test_import_data_new(self): + Book.objects.all().delete() + self.assertEqual(0, Book.objects.count()) + result = self.resource.import_data(self.dataset, raise_errors=True) + + self.assertFalse(result.has_errors()) + self.assertEqual(len(result.rows), 1) + self.assertTrue(result.rows[0].diff) + self.assertEqual(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_NEW) + self.assertEqual(result.rows[0].row_values.get("name"), None) + self.assertEqual(result.rows[0].row_values.get("author_email"), None) + + self.assertIsNone(result.rows[0].instance) + self.assertIsNone(result.rows[0].original) + + self.assertEqual(1, Book.objects.count()) + instance = Book.objects.first() + self.assertEqual(instance.author_email, "test@example.com") + self.assertEqual(instance.price, Decimal("10.25")) + + def test_import_data_new_store_instance(self): + self.resource = BookResourceWithStoreInstance() + Book.objects.all().delete() + self.assertEqual(0, Book.objects.count()) + result = self.resource.import_data(self.dataset, raise_errors=True) + + self.assertEqual(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_NEW) + self.assertIsNotNone(result.rows[0].instance) + self.assertIsNone(result.rows[0].original) + self.assertEqual(1, Book.objects.count()) + book = Book.objects.first() + self.assertEqual(book.pk, result.rows[0].instance.pk) + + def test_import_data_update_store_instance(self): + self.resource = BookResourceWithStoreInstance() + result = self.resource.import_data(self.dataset, raise_errors=True) + self.assertEqual( + 
result.rows[0].import_type, results.RowResult.IMPORT_TYPE_UPDATE + ) + self.assertIsNotNone(result.rows[0].instance) + self.assertIsNotNone(result.rows[0].original) + self.assertEqual(1, Book.objects.count()) + book = Book.objects.first() + self.assertEqual(book.pk, result.rows[0].instance.pk) + + @skipUnlessDBFeature("supports_transactions") + @mock.patch("import_export.resources.connections") + def test_import_data_no_transaction(self, mock_db_connections): + class Features: + supports_transactions = False + + class DummyConnection: + features = Features() + + dummy_connection = DummyConnection() + mock_db_connections.__getitem__.return_value = dummy_connection + result = self.resource.import_data( + self.dataset, dry_run=True, use_transactions=False, raise_errors=True + ) + + self.assertFalse(result.has_errors()) + self.assertEqual(len(result.rows), 1) + self.assertTrue(result.rows[0].diff) + self.assertEqual( + result.rows[0].import_type, results.RowResult.IMPORT_TYPE_UPDATE + ) + self.assertEqual(result.rows[0].row_values.get("name"), None) + self.assertEqual(result.rows[0].row_values.get("author_email"), None) + + def test_import_data_new_override_do_instance_save(self): + class CustomDoInstanceSave(BookResource): + is_create = False + + def do_instance_save(self, instance, is_create): + self.is_create = is_create + super().do_instance_save(instance, is_create) + + Book.objects.all().delete() + self.assertEqual(0, Book.objects.count()) + self.resource = CustomDoInstanceSave() + self.assertFalse(self.resource.is_create) + + result = self.resource.import_data(self.dataset, raise_errors=True) + + self.assertFalse(result.has_errors()) + self.assertEqual(1, Book.objects.count()) + self.assertTrue(self.resource.is_create) diff --git a/tests/core/tests/test_resources/test_modelresource/test_deprecated_fields.py b/tests/core/tests/test_resources/test_modelresource/test_deprecated_fields.py new file mode 100644 index 000000000..a0aa76e1e --- /dev/null +++ 
b/tests/core/tests/test_resources/test_modelresource/test_deprecated_fields.py @@ -0,0 +1,88 @@ +import warnings + +import tablib +from core.models import Book +from core.tests.resources import BookResource +from django.test import TestCase + +from import_export import resources + + +class DeprecatedMethodTest(TestCase): + """ + These tests relate to renamed methods in v4. + The tests can be removed when the deprecated methods are removed. + """ + + def setUp(self): + rows = [ + ["1", "Ulysses"], + ] + self.dataset = tablib.Dataset(*rows, headers=["id", "name"]) + self.obj = Book.objects.create(id=1, name="Ulysses") + + def test_import_obj_renamed(self): + resource = BookResource() + with self.assertWarns( + DeprecationWarning, + ): + resource.import_obj(self.obj, self.dataset, dry_run=True) + + def test_import_obj_passes_params(self): + class MyBookResource(resources.ModelResource): + def import_instance(self, instance, row, **kwargs): + self.kwargs = kwargs + + class Meta: + model = Book + + resource = MyBookResource() + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + resource.import_obj(self.obj, self.dataset, True) + self.assertTrue(resource.kwargs["dry_run"]) + + def test_after_import_instance_renamed(self): + resource = BookResource() + with self.assertWarns( + DeprecationWarning, + ): + resource.after_import_instance(self.obj, True, row_number=1) + + def test_after_import_instance_passes_params(self): + class MyBookResource(resources.ModelResource): + def after_init_instance(self, instance, new, row, **kwargs): + self.kwargs = kwargs + + class Meta: + model = Book + + resource = MyBookResource() + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + resource.after_import_instance(self.obj, True, row_number=1) + self.assertEqual(1, resource.kwargs["row_number"]) + + def test_get_fields_deprecated(self): + resource = BookResource() + with self.assertWarnsRegex( + 
DeprecationWarning, + r"The 'get_fields\(\)' method is deprecated " + "and will be removed in a future release", + ): + fields = resource.get_fields() + + self.assertEqual( + {f.column_name for f in fields}, + { + "added", + "author", + "author_email", + "categories", + "id", + "name", + "price", + "published_date", + "published_time", + }, + ) diff --git a/tests/core/tests/test_resources/test_modelresource/test_dynamic_customization.py b/tests/core/tests/test_resources/test_modelresource/test_dynamic_customization.py new file mode 100644 index 000000000..fedc4baa1 --- /dev/null +++ b/tests/core/tests/test_resources/test_modelresource/test_dynamic_customization.py @@ -0,0 +1,358 @@ +import tablib +from core.models import ( + Book, + Category, + Entry, + Profile, + WithDynamicDefault, + WithFloatField, +) +from core.tests.resources import BookResource, CategoryResource +from django.contrib.auth.models import User +from django.core.exceptions import FieldDoesNotExist +from django.db.models import Count +from django.db.utils import ConnectionDoesNotExist +from django.test import TestCase + +from import_export import exceptions, fields, resources, results + + +class DynamicBehaviorCustomizationTest(TestCase): + def setUp(self): + self.resource = BookResource() + self.book = Book.objects.create(name="Some book") + self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"]) + row = [self.book.pk, "Some book", "test@example.com", "10.25"] + self.dataset.append(row) + + def test_related_one_to_one(self): + # issue #17 - Exception when attempting access something on the + # related_name + + user = User.objects.create(username="foo") + profile = Profile.objects.create(user=user) + Entry.objects.create(user=user) + Entry.objects.create(user=User.objects.create(username="bar")) + + class EntryResource(resources.ModelResource): + class Meta: + model = Entry + fields = ("user__profile", "user__profile__is_private") + + resource = EntryResource() + dataset = 
resource.export(Entry.objects.all()) + self.assertEqual(dataset.dict[0]["user__profile"], profile.pk) + self.assertEqual(dataset.dict[0]["user__profile__is_private"], "1") + self.assertEqual(dataset.dict[1]["user__profile"], "") + self.assertEqual(dataset.dict[1]["user__profile__is_private"], "") + + def test_empty_get_queryset(self): + # issue #25 - Overriding queryset on export() fails when passed + # queryset has zero elements + dataset = self.resource.export(queryset=Book.objects.none()) + self.assertEqual(len(dataset), 0) + + def test_import_data_skip_unchanged(self): + class MyBookResource(resources.ModelResource): + save_count = 0 + + def save_instance(self, instance, is_create, row, **kwargs): + self.save_count += 1 + + class Meta: + skip_unchanged = True + model = Book + + # Make sure we test with ManyToMany related objects + cat1 = Category.objects.create(name="Cat 1") + cat2 = Category.objects.create(name="Cat 2") + self.book.categories.add(cat1) + self.book.categories.add(cat2) + dataset = self.resource.export() + + # Create a new resource that attempts to reimport the data currently + # in the database while skipping unchanged rows (i.e. 
    def test_before_import_access_to_kwargs(self):
        """Keyword args passed to import_data() are visible in before_import()."""

        class B(BookResource):
            def before_import(self, dataset, **kwargs):
                # Rewrite the author_email column using the extra kwarg so the
                # import picks up the injected address.
                if "extra_arg" in kwargs:
                    dataset.headers[dataset.headers.index("author_email")] = "old_email"
                    dataset.insert_col(
                        0, lambda row: kwargs["extra_arg"], header="author_email"
                    )

        resource = B()
        result = resource.import_data(
            self.dataset, raise_errors=True, extra_arg="extra@example.com"
        )
        self.assertFalse(result.has_errors())
        self.assertEqual(len(result.rows), 1)
        instance = Book.objects.get(pk=self.book.pk)
        self.assertEqual(instance.author_email, "extra@example.com")

    def test_before_import_raises_error(self):
        """An exception in before_import() is wrapped in exceptions.ImportError."""

        class B(BookResource):
            def before_import(self, dataset, **kwargs):
                raise Exception("This is an invalid dataset")

        resource = B()
        with self.assertRaises(exceptions.ImportError) as cm:
            resource.import_data(self.dataset, raise_errors=True)
        self.assertEqual("This is an invalid dataset", cm.exception.error.args[0])

    def test_after_import_raises_error(self):
        """An exception in after_import() is wrapped in exceptions.ImportError."""

        class B(BookResource):
            def after_import(self, dataset, result, **kwargs):
                raise Exception("This is an invalid dataset")

        resource = B()
        with self.assertRaises(exceptions.ImportError) as cm:
            resource.import_data(self.dataset, raise_errors=True)
        self.assertEqual("This is an invalid dataset", cm.exception.error.args[0])

    def test_link_to_nonexistent_field(self):
        """Declaring a fields path through an unknown model field raises
        FieldDoesNotExist at class-definition time."""
        with self.assertRaises(FieldDoesNotExist) as cm:

            class BrokenBook1(resources.ModelResource):
                class Meta:
                    model = Book
                    fields = ("nonexistent__invalid",)

        self.assertEqual(
            "Book.nonexistent: Book has no field named 'nonexistent'",
            cm.exception.args[0],
        )

        with self.assertRaises(FieldDoesNotExist) as cm:

            class BrokenBook2(resources.ModelResource):
                class Meta:
                    model = Book
                    fields = ("author__nonexistent",)

        self.assertEqual(
            "Book.author.nonexistent: Author has no field named " "'nonexistent'",
            cm.exception.args[0],
        )

    def test_link_to_nonrelation_field(self):
        """Traversing '__' through a non-relation field raises KeyError."""
        with self.assertRaises(KeyError) as cm:

            class BrokenBook1(resources.ModelResource):
                class Meta:
                    model = Book
                    fields = ("published__invalid",)

        self.assertEqual("Book.published is not a relation", cm.exception.args[0])

        with self.assertRaises(KeyError) as cm:

            class BrokenBook2(resources.ModelResource):
                class Meta:
                    model = Book
                    fields = ("author__name__invalid",)

        self.assertEqual("Book.author.name is not a relation", cm.exception.args[0])

    def test_override_field_construction_in_resource(self):
        """field_from_django_field() may be overridden to control how the
        metaclass builds resource fields from model fields."""

        class B(resources.ModelResource):
            class Meta:
                model = Book
                fields = ("published",)

            @classmethod
            def field_from_django_field(self, field_name, django_field, readonly):
                if field_name == "published":
                    return {"sound": "quack"}

        B()
        self.assertEqual({"sound": "quack"}, B.fields["published"])

    def test_readonly_annotated_field_import_and_export(self):
        """A readonly field backed by a queryset annotation exports correctly
        and is ignored (rows skipped) on import."""

        class B(resources.ModelResource):
            total_categories = fields.Field("total_categories", readonly=True)

            class Meta:
                model = Book
                skip_unchanged = True

        cat1 = Category.objects.create(name="Cat 1")
        self.book.categories.add(cat1)

        resource = B()

        # Verify that the annotated field is correctly exported
        dataset = resource.export(
            queryset=Book.objects.annotate(total_categories=Count("categories"))
        )
        self.assertEqual(int(dataset.dict[0]["total_categories"]), 1)

        # Verify that importing the annotated field raises no errors and that
        # the rows are skipped
        result = resource.import_data(dataset, raise_errors=True)
        self.assertFalse(result.has_errors())
        self.assertEqual(len(result.rows), len(dataset))
        self.assertEqual(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_SKIP)

    def test_follow_relationship_for_modelresource(self):
        """A writable 'user__username' attribute path updates the related
        object when the related instance is saved in after_save_instance()."""

        class EntryResource(resources.ModelResource):
            username = fields.Field(attribute="user__username", readonly=False)

            class Meta:
                model = Entry
                fields = ("id", "username")

            def after_save_instance(self, instance, row_, **kwargs):
                using_transactions = kwargs.get("using_transactions", False)
                dry_run = kwargs.get("dry_run", False)
                if not using_transactions and dry_run:
                    # we don't have transactions and we want to do a dry_run
                    pass
                else:
                    instance.user.save()

        user = User.objects.create(username="foo")
        entry = Entry.objects.create(user=user)
        row = [
            entry.pk,
            "bar",
        ]
        self.dataset = tablib.Dataset(headers=["id", "username"])
        self.dataset.append(row)
        result = EntryResource().import_data(
            self.dataset, raise_errors=True, dry_run=False
        )
        self.assertFalse(result.has_errors())
        self.assertEqual(User.objects.get(pk=user.pk).username, "bar")

    def test_import_data_dynamic_default_callable(self):
        """A callable model-field default is invoked per row, so two rows with
        empty values get distinct generated names."""

        class DynamicDefaultResource(resources.ModelResource):
            class Meta:
                model = WithDynamicDefault
                fields = (
                    "id",
                    "name",
                )

        self.assertTrue(callable(DynamicDefaultResource.fields["name"].default))

        resource = DynamicDefaultResource()
        dataset = tablib.Dataset(
            headers=[
                "id",
                "name",
            ]
        )
        dataset.append([1, None])
        dataset.append([2, None])
        resource.import_data(dataset, raise_errors=False)
        objs = WithDynamicDefault.objects.all()
        self.assertNotEqual(objs[0].name, objs[1].name)

    def test_float_field(self):
        """Empty or None float values import as None (issue 433)."""

        class R(resources.ModelResource):
            class Meta:
                model = WithFloatField

        resource = R()
        dataset = tablib.Dataset(
            headers=[
                "id",
                "f",
            ]
        )
        dataset.append([None, None])
        dataset.append([None, ""])
        resource.import_data(dataset, raise_errors=True)
        self.assertEqual(WithFloatField.objects.all()[0].f, None)
        self.assertEqual(WithFloatField.objects.all()[1].f, None)

    def test_get_db_connection_name(self):
        """Meta.using_db selects the connection; default is 'default'."""

        class BookResource(resources.ModelResource):
            class Meta:
                using_db = "other_db"

        self.assertEqual(BookResource().get_db_connection_name(), "other_db")
        self.assertEqual(CategoryResource().get_db_connection_name(), "default")

    def test_import_data_raises_field_for_wrong_db(self):
        """An unknown Meta.using_db alias surfaces as ConnectionDoesNotExist."""

        class BookResource(resources.ModelResource):
            class Meta:
                using_db = "wrong_db"

        with self.assertRaises(ConnectionDoesNotExist):
            BookResource().import_data(self.dataset)

    def test_natural_foreign_key_detection(self):
        """
        Test that when the _meta option for use_natural_foreign_keys
        is set on a resource that foreign key widgets are created
        with that flag, and when it's off they are not.
        """

        # For future proof testing, we have one resource with natural
        # foreign keys on, and one off. If the default ever changes
        # this should still work.
        class _BookResource_Unfk(resources.ModelResource):
            class Meta:
                use_natural_foreign_keys = True
                model = Book

        class _BookResource(resources.ModelResource):
            class Meta:
                use_natural_foreign_keys = False
                model = Book

        resource_with_nfks = _BookResource_Unfk()
        author_field_widget = resource_with_nfks.fields["author"].widget
        self.assertTrue(author_field_widget.use_natural_foreign_keys)

        resource_without_nfks = _BookResource()
        author_field_widget = resource_without_nfks.fields["author"].widget
        self.assertFalse(author_field_widget.use_natural_foreign_keys)

    def test_natural_foreign_key_false_positives(self):
        """
        Ensure that if the field's model does not have natural foreign
        key functions, it is not set to use natural foreign keys.
        """
        from django.db import models

        class RelatedModel(models.Model):
            name = models.CharField()

            class Meta:
                app_label = "Test"

        class TestModel(models.Model):
            related_field = models.ForeignKey(RelatedModel, on_delete=models.PROTECT)

            class Meta:
                app_label = "Test"

        class TestModelResource(resources.ModelResource):
            class Meta:
                model = TestModel
                fields = ("id", "related_field")
                use_natural_foreign_keys = True

        resource = TestModelResource()
        related_field_widget = resource.fields["related_field"].widget
        self.assertFalse(related_field_widget.use_natural_foreign_keys)
class ErrorHandlingTest(TestCase):
    """Tests for how import_data() collects, reports, and raises errors."""

    def setUp(self):
        # One existing book plus a matching one-row dataset keyed on its pk.
        self.resource = BookResource()
        self.book = Book.objects.create(name="Some book")
        self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"])
        row = [self.book.pk, "Some book", "test@example.com", "10.25"]
        self.dataset.append(row)

    @mock.patch("import_export.resources.connections")
    def test_ImproperlyConfigured_if_use_transactions_set_when_not_supported(
        self, mock_db_connections
    ):
        """use_transactions=True must fail fast when the DB backend reports
        no transaction support."""

        class Features:
            supports_transactions = False

        class DummyConnection:
            features = Features()

        dummy_connection = DummyConnection()
        mock_db_connections.__getitem__.return_value = dummy_connection
        with self.assertRaises(ImproperlyConfigured):
            self.resource.import_data(
                self.dataset,
                use_transactions=True,
            )

    def test_importing_with_line_number_logging(self):
        """before/after row hooks receive 1-based row numbers."""
        resource = BookResourceWithLineNumberLogger()
        resource.import_data(self.dataset, raise_errors=True)
        self.assertEqual(resource.before_lines, [1])
        self.assertEqual(resource.after_lines, [1])

    def test_import_data_raises_field_specific_validation_errors(self):
        """A bad date value marks the row invalid and records the field name."""
        resource = AuthorResource()
        dataset = tablib.Dataset(headers=["name", "birthday"])
        dataset.append(["A.A.Milne", "1882test-01-18"])

        result = resource.import_data(dataset, raise_errors=False)

        self.assertTrue(result.has_validation_errors())
        self.assertIs(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_INVALID)
        self.assertIn("birthday", result.invalid_rows[0].field_specific_errors)

    def test_import_data_raises_field_specific_validation_errors_with_skip_unchanged(
        self,
    ):
        """Field validation errors are still reported for existing rows."""
        resource = AuthorResource()
        author = Author.objects.create(name="Some author")

        dataset = tablib.Dataset(headers=["id", "birthday"])
        dataset.append([author.id, "1882test-01-18"])

        result = resource.import_data(dataset, raise_errors=False)

        self.assertTrue(result.has_validation_errors())
        self.assertIs(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_INVALID)
        self.assertIn("birthday", result.invalid_rows[0].field_specific_errors)

    def test_import_data_empty_dataset_with_collect_failed_rows(self):
        """A bad import_id_fields declaration raises even on an empty dataset."""

        class _AuthorResource(resources.ModelResource):
            class Meta:
                model = Author
                import_id_fields = ["non_existent_field"]

        resource = _AuthorResource()
        with self.assertRaises(exceptions.ImportError) as e:
            resource.import_data(
                tablib.Dataset(), collect_failed_rows=True, raise_errors=True
            )
        self.assertEqual(
            "The following fields are declared in 'import_id_fields' "
            "but are not present in the resource fields: non_existent_field",
            str(e.exception),
        )

    def test_collect_failed_rows(self):
        """collect_failed_rows=True gathers failing rows plus an Error column."""
        resource = ProfileResource()
        headers = ["id", "user"]
        # 'user' is a required field, the database will raise an error.
        row = [None, None]
        dataset = tablib.Dataset(row, headers=headers)
        result = resource.import_data(
            dataset,
            dry_run=True,
            use_transactions=True,
            collect_failed_rows=True,
        )
        self.assertEqual(result.failed_dataset.headers, ["id", "user", "Error"])
        self.assertEqual(len(result.failed_dataset), 1)
        # We can't check the error message because it's package- and version-dependent

    def test_row_result_raise_errors(self):
        """raise_errors=True surfaces the failing row number and data."""
        resource = ProfileResource()
        headers = ["id", "user"]
        # 'user' is a required field, the database will raise an error.
        row = [None, None]
        dataset = tablib.Dataset(row, headers=headers)
        with self.assertRaises(exceptions.ImportError) as exc:
            resource.import_data(
                dataset,
                dry_run=True,
                use_transactions=True,
                raise_errors=True,
            )
        row_error = exc.exception
        self.assertEqual(1, row_error.number)
        self.assertEqual({"id": None, "user": None}, row_error.row)

    def test_collect_failed_rows_validation_error(self):
        """Generic exceptions during field save land in the failed dataset."""
        resource = ProfileResource()
        row = ["1"]
        dataset = tablib.Dataset(row, headers=["id"])
        with mock.patch(
            "import_export.resources.Field.save", side_effect=Exception("fail!")
        ):
            result = resource.import_data(
                dataset,
                dry_run=True,
                use_transactions=True,
                collect_failed_rows=True,
            )
        self.assertEqual(result.failed_dataset.headers, ["id", "Error"])
        self.assertEqual(
            1,
            len(result.failed_dataset),
        )
        self.assertEqual("1", result.failed_dataset.dict[0]["id"])
        self.assertEqual("fail!", result.failed_dataset.dict[0]["Error"])

    def test_row_result_raise_ValidationError(self):
        """ValidationError during field save is re-raised as ImportError."""
        resource = ProfileResource()
        row = ["1"]
        dataset = tablib.Dataset(row, headers=["id"])
        with mock.patch(
            "import_export.resources.Field.save", side_effect=ValidationError("fail!")
        ):
            with self.assertRaisesRegex(
                exceptions.ImportError, "{'__all__': \\['fail!'\\]}"
            ):
                resource.import_data(
                    dataset,
                    dry_run=True,
                    use_transactions=True,
                    raise_errors=True,
                )

    def test_row_result_raise_ValidationError_collect_failed_rows(self):
        """ValidationError text is captured in failed rows (issue 1752)."""
        resource = ProfileResource()
        row = ["1"]
        dataset = tablib.Dataset(row, headers=["id"])
        with mock.patch(
            "import_export.resources.Field.save", side_effect=ValidationError("fail!")
        ):
            res = resource.import_data(
                dataset, use_transactions=True, collect_failed_rows=True
            )
        self.assertEqual(
            res.failed_dataset.dict[0], {"id": "1", "Error": "{'__all__': ['fail!']}"}
        )
class ExportFunctionalityTest(TestCase):
    """Exercise ModelResource.export(): headers, query counts, and
    declared-field handling."""

    fixtures = ["author"]

    def setUp(self):
        # A single persisted book and a matching one-row dataset.
        self.resource = BookResource()
        self.book = Book.objects.create(name="Some book")
        headers = ["id", "name", "author_email", "price"]
        self.dataset = tablib.Dataset(headers=headers)
        self.dataset.append([self.book.pk, "Some book", "test@example.com", "10.25"])

    def test_get_export_headers(self):
        expected = [
            "id",
            "name",
            "author",
            "author_email",
            "imported",
            "published",
            "published_time",
            "price",
            "added",
            "categories",
        ]
        self.assertEqual(self.resource.get_export_headers(), expected)

    def test_export(self):
        # Exporting one book should cost exactly two queries.
        with self.assertNumQueries(2):
            exported = self.resource.export(queryset=Book.objects.all())
        self.assertEqual(len(exported), 1)

    def test_export_with_foreign_keys(self):
        """
        Test that export() containing foreign keys doesn't generate
        extra query for every row.
        Fixes #974
        """
        fk_author = Author.objects.create()
        self.book.author = fk_author
        self.book.save()
        for title in ("Second book", "Third book"):
            Book.objects.create(name=title, author=Author.objects.create())

        with self.assertNumQueries(3):
            exported = self.resource.export(Book.objects.prefetch_related("categories"))
        self.assertEqual(exported.dict[0]["author"], fk_author.pk)
        self.assertEqual(len(exported), 3)

    def test_export_iterable(self):
        # export() also accepts a plain list instead of a queryset.
        with self.assertNumQueries(2):
            exported = self.resource.export(queryset=list(Book.objects.all()))
        self.assertEqual(len(exported), 1)

    def test_export_prefetch_related(self):
        with self.assertNumQueries(3):
            exported = self.resource.export(
                queryset=Book.objects.prefetch_related("categories").all()
            )
        self.assertEqual(len(exported), 1)

    def test_export_handles_named_queryset_parameter(self):
        # The queryset and extra kwargs must be forwarded to before_export().
        class _BookResource(BookResource):
            def before_export(self, queryset, **kwargs):
                self.qs = queryset
                self.kwargs_ = kwargs

        self.resource = _BookResource()
        self.resource.export(queryset=Book.objects.all(), a=1)
        self.assertEqual(len(self.resource.qs), Book.objects.count())
        self.assertEqual({"a": 1}, self.resource.kwargs_)

    def test_export_declared_field(self):
        # A declared field with no attribute exports as empty (see 1874).
        class EBookResource(ModelResource):
            published = Field(column_name="published")

            class Meta:
                model = EBook
                fields = ("id", "published")

        self.book.published = date(1955, 4, 5)
        self.book.save()

        data = EBookResource().export()
        self.assertEqual("", data.dict[0]["published"])

    def test_export_declared_field_custom_name(self):
        # A declared field whose name differs from both its attribute and
        # column_name is still exported (see 1893).
        class EBookResource(ModelResource):
            auteur_name = Field(attribute="author__name", column_name="Author Name")

            class Meta:
                model = EBook
                fields = ("id", "auteur_name")

        self.book.author = Author.objects.get(pk=5)
        self.book.save()

        data = EBookResource().export()
        self.assertEqual("Ian Fleming", data.dict[0]["Author Name"])
class ForeignKeyM2MTest(TestCase):
    """Import/export behaviour of foreign-key and many-to-many fields."""

    def setUp(self):
        self.resource = BookResource()
        self.book = Book.objects.create(name="Some book")
        self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"])
        self.dataset.append([self.book.pk, "Some book", "test@example.com", "10.25"])

    def test_foreign_keys_export(self):
        # FK columns export as the related object's primary key.
        writer = Author.objects.create(name="Foo")
        self.book.author = writer
        self.book.save()

        exported = self.resource.export(Book.objects.all())
        self.assertEqual(exported.dict[0]["author"], writer.pk)

    def test_foreign_keys_import(self):
        writer = Author.objects.create(name="Bar")
        data = tablib.Dataset(
            [None, "FooBook", writer.pk], headers=["id", "name", "author"]
        )
        self.resource.import_data(data, raise_errors=True)

        self.assertEqual(Book.objects.get(name="FooBook").author, writer)

    def test_m2m_export(self):
        # M2M columns export as a comma-joined list of related pks.
        first = Category.objects.create(name="Cat 1")
        second = Category.objects.create(name="Cat 2")
        self.book.categories.add(first, second)

        exported = self.resource.export(Book.objects.all())
        self.assertEqual(exported.dict[0]["categories"], f"{first.pk},{second.pk}")

    def test_m2m_import(self):
        category = Category.objects.create(name="Cat 1")
        data = tablib.Dataset(
            [None, "FooBook", str(category.pk)], headers=["id", "name", "categories"]
        )
        self.resource.import_data(data, raise_errors=True)

        self.assertIn(category, Book.objects.get(name="FooBook").categories.all())

    def test_m2m_import_clear(self):
        # An empty M2M cell clears any existing relations.
        category = Category.objects.create(name="Cat 1")
        self.book.categories.add(category)
        self.assertEqual(1, self.book.categories.count())

        data = tablib.Dataset(
            [self.book.pk, "FooBook", ""], headers=["id", "name", "categories"]
        )
        self.resource.import_data(data, raise_errors=True)

        self.assertEqual(0, Book.objects.get(name="FooBook").categories.count())

    def test_m2m_options_import(self):
        # A custom ManyToManyWidget can match by name with a custom separator.
        first = Category.objects.create(name="Cat 1")
        second = Category.objects.create(name="Cat 2")
        data = tablib.Dataset(
            [None, "FooBook", "Cat 1|Cat 2"], headers=["id", "name", "categories"]
        )

        class BookM2MResource(resources.ModelResource):
            categories = fields.Field(
                attribute="categories",
                widget=widgets.ManyToManyWidget(Category, field="name", separator="|"),
            )

            class Meta:
                model = Book

        BookM2MResource().import_data(data, raise_errors=True)
        book = Book.objects.get(name="FooBook")
        self.assertIn(first, book.categories.all())
        self.assertIn(second, book.categories.all())

    def test_m2m_add(self):
        # With m2m_add=True, importing appends relations instead of replacing.
        cats = [
            Category.objects.create(name=f"Cat {i}") for i in (1, 2, 3, 4)
        ]
        headers = ["id", "name", "categories"]
        data = tablib.Dataset([None, "FooBook", "Cat 1|Cat 2"], headers=headers)

        class BookM2MResource(resources.ModelResource):
            categories = fields.Field(
                attribute="categories",
                m2m_add=True,
                widget=widgets.ManyToManyWidget(Category, field="name", separator="|"),
            )

            class Meta:
                model = Book

        resource = BookM2MResource()
        resource.import_data(data, raise_errors=True)
        book = Book.objects.get(name="FooBook")
        self.assertIn(cats[0], book.categories.all())
        self.assertIn(cats[1], book.categories.all())
        self.assertNotIn(cats[2], book.categories.all())
        self.assertNotIn(cats[3], book.categories.all())

        # Re-importing Cat 1|Cat 2 is a no-op; Cat 3|Cat 4 are appended.
        row_existing = [book.id, "FooBook", "Cat 1|Cat 2"]
        row_new = [book.id, "FooBook", "Cat 3|Cat 4"]
        data = tablib.Dataset(row_existing, row_new, headers=headers)
        resource.import_data(data, raise_errors=True)

        reloaded = Book.objects.get(name="FooBook")
        self.assertEqual(book.id, reloaded.id)
        self.assertEqual(book.categories.count(), 4)
        for cat in cats:
            self.assertIn(cat, reloaded.categories.all())
class RelationshipFieldTest(TestCase):
    """Tests for relation traversal ('__' paths), dehydrate methods, and
    per-field widget configuration on ModelResource subclasses."""

    def setUp(self):
        self.resource = BookResource()
        self.book = Book.objects.create(name="Some book")
        self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"])
        row = [self.book.pk, "Some book", "test@example.com", "10.25"]
        self.dataset.append(row)

    def test_relationships_fields(self):
        """'author__name' follows the FK and exports the related value."""

        class B(resources.ModelResource):
            class Meta:
                model = Book
                fields = ("author__name",)

        author = Author.objects.create(name="Author")
        self.book.author = author
        resource = B()
        result = resource.fields["author__name"].export(self.book)
        self.assertEqual(result, author.name)

    def test_dehydrating_fields(self):
        """A dehydrate_<field>() method on the resource supplies the value."""

        class B(resources.ModelResource):
            full_title = fields.Field(column_name="Full title")

            class Meta:
                model = Book
                fields = ("author__name", "full_title")

            def dehydrate_full_title(self, obj):
                return f"{obj.name} by {obj.author.name}"

        author = Author.objects.create(name="Author")
        self.book.author = author
        resource = B()
        full_title = resource.export_field(resource.fields["full_title"], self.book)
        self.assertEqual(full_title, f"{self.book.name} by {self.book.author.name}")

    def test_dehydrating_field_using_callable(self):
        """dehydrate_method may be a callable instead of a method name."""

        class B(resources.ModelResource):
            full_title = fields.Field(
                column_name="Full title",
                dehydrate_method=lambda obj: f"{obj.name} by {obj.author.name}",
            )

            class Meta:
                model = Book
                fields = ("author__name", "full_title")

        author = Author.objects.create(name="Author")
        self.book.author = author
        resource = B()
        full_title = resource.export_field(resource.fields["full_title"], self.book)
        self.assertEqual(full_title, f"{self.book.name} by {self.book.author.name}")

    def test_dehydrate_field_using_custom_dehydrate_field_method(self):
        """dehydrate_method may name an arbitrary resource method."""

        class B(resources.ModelResource):
            full_title = fields.Field(
                column_name="Full title", dehydrate_method="foo_dehydrate_full_title"
            )

            class Meta:
                model = Book
                fields = "full_title"

            def foo_dehydrate_full_title(self, obj):
                return f"{obj.name} by {obj.author.name}"

        author = Author.objects.create(name="Author")
        self.book.author = author
        resource = B()

        full_title = resource.export_field(resource.fields["full_title"], self.book)
        self.assertEqual(full_title, f"{self.book.name} by {self.book.author.name}")

    def test_invalid_relation_field_name(self):
        """A Meta.fields entry that is neither field nor relation is ignored."""

        class B(resources.ModelResource):
            full_title = fields.Field(column_name="Full title")

            class Meta:
                model = Book
                # author_name is not a valid field or relation,
                # so should be ignored
                fields = ("author_name", "full_title")

        resource = B()
        self.assertEqual(1, len(resource.fields))
        self.assertEqual("full_title", list(resource.fields.keys())[0])

    def test_widget_format_in_fk_field(self):
        """Meta.widgets kwargs apply to fields reached via a relation."""

        class B(resources.ModelResource):
            class Meta:
                model = Book
                fields = ("author__birthday",)
                widgets = {
                    "author__birthday": {"format": "%Y-%m-%d"},
                }

        author = Author.objects.create(name="Author")
        self.book.author = author
        resource = B()
        result = resource.fields["author__birthday"].export(self.book)
        self.assertEqual(result, str(date.today()))

    def test_widget_kwargs_for_field(self):
        """Meta.widgets kwargs configure the widget of a direct model field."""

        class B(resources.ModelResource):
            class Meta:
                model = Book
                fields = ("published",)
                widgets = {
                    "published": {"format": "%d.%m.%Y"},
                }

        resource = B()
        self.book.published = date(2012, 8, 13)
        result = resource.fields["published"].export(self.book)
        self.assertEqual(result, "13.08.2012")
+ """Check that fields were determined correctly""" + + # check that our fields were determined + self.assertIn("name", self.my_resource.fields) + + # check that resource instance fields attr isn't link to resource cls + # fields + self.assertFalse(MyResource.fields is self.my_resource.fields) + + # dynamically add new resource field into resource instance + self.my_resource.fields.update( + OrderedDict( + [ + ("new_field", fields.Field()), + ] + ) + ) + + # check that new field in resource instance fields + self.assertIn("new_field", self.my_resource.fields) + + # check that new field not in resource cls fields + self.assertNotIn("new_field", MyResource.fields) + + def test_kwargs(self): + target_kwargs = {"a": 1} + my_resource = MyResource(**target_kwargs) + self.assertEqual(my_resource.kwargs, target_kwargs) + + def test_field_column_name(self): + field = self.my_resource.fields["name"] + self.assertIn(field.column_name, "name") + + def test_meta(self): + self.assertIsInstance(self.my_resource._meta, ResourceOptions) + + @mock.patch("builtins.dir") + def test_new_handles_null_options(self, mock_dir): + # #1163 - simulates a call to dir() returning additional attributes + mock_dir.return_value = ["attrs"] + + class A(MyResource): + pass + + A() + + def test_get_export_headers_order(self): + self.assertEqual( + self.my_resource.get_export_headers(), ["email", "name", "extra"] + ) + + def test_default_after_import(self): + self.assertIsNone( + self.my_resource.after_import( + tablib.Dataset(), + results.Result(), + ) + ) + + def test_get_use_transactions_defined_in_resource(self): + class A(MyResource): + class Meta: + use_transactions = True + + resource = A() + self.assertTrue(resource.get_use_transactions()) + + def test_get_field_name_raises_AttributeError(self): + err = ( + "Field x does not exists in resource" + ) + with self.assertRaisesRegex(AttributeError, err): + self.my_resource.get_field_name("x") + + def 
test_init_instance_raises_NotImplementedError(self): + with self.assertRaises(NotImplementedError): + self.my_resource.init_instance([]) + + @patch("core.models.Book.full_clean") + def test_validate_instance_called_with_import_validation_errors_as_None( + self, full_clean_mock + ): + # validate_instance() import_validation_errors is an optional kwarg + # If not provided, it defaults to an empty dict + # this tests that scenario by ensuring that an empty dict is passed + # to the model instance full_clean() method. + book = Book() + self.my_resource._meta.clean_model_instances = True + self.my_resource.validate_instance(book) + target = {} + full_clean_mock.assert_called_once_with( + exclude=target.keys(), validate_unique=True + ) diff --git a/tests/core/tests/test_resources/test_modelresource/test_resource_factory.py b/tests/core/tests/test_resources/test_modelresource/test_resource_factory.py new file mode 100644 index 000000000..ebf534e62 --- /dev/null +++ b/tests/core/tests/test_resources/test_modelresource/test_resource_factory.py @@ -0,0 +1,11 @@ +from core.models import Book +from django.test import TestCase + +from import_export import resources + + +class ModelResourceFactoryTest(TestCase): + def test_create(self): + BookResource = resources.modelresource_factory(Book) + self.assertIn("id", BookResource.fields) + self.assertEqual(BookResource._meta.model, Book) diff --git a/tests/core/tests/test_resources/test_modelresource/test_resource_fields.py b/tests/core/tests/test_resources/test_modelresource/test_resource_fields.py new file mode 100644 index 000000000..c1a81130d --- /dev/null +++ b/tests/core/tests/test_resources/test_modelresource/test_resource_fields.py @@ -0,0 +1,175 @@ +import warnings + +import tablib +from core.models import Book +from django.test import TestCase + +from import_export import fields, resources + +# ignore warnings which result from invalid field declaration (#1930) +warnings.simplefilter("ignore") + + +class 
class ModelResourceFieldDeclarations(TestCase):
    """With an explicit Meta.fields list, undeclared columns are ignored."""

    class MyBookResource(resources.ModelResource):
        author_email = fields.Field(
            attribute="author_email", column_name="author_email"
        )

        class Meta:
            model = Book
            fields = ("id", "price")

    def setUp(self):
        self.book = Book.objects.create(name="Moonraker", price=".99")
        self.resource = ModelResourceFieldDeclarations.MyBookResource()

    def test_declared_field_not_imported(self):
        self.assertEqual("", self.book.author_email)
        ds = tablib.Dataset(headers=["id", "price", "author_email"])
        ds.append((self.book.id, "12.99", "jj@example.com"))
        self.resource.import_data(ds, raise_errors=True)
        self.book.refresh_from_db()
        # email should not be updated
        self.assertEqual("", self.book.author_email)

    def test_declared_field_not_exported(self):
        self.assertEqual("", self.book.author_email)
        exported = self.resource.export()
        self.assertFalse("author_email" in exported.dict[0])


class ModelResourceNoFieldDeclarations(TestCase):
    """Without a Meta.fields list, every field (declared or not) is used."""

    class MyBookResource(resources.ModelResource):
        author_email = fields.Field(
            attribute="author_email", column_name="author_email"
        )

        class Meta:
            model = Book

    def setUp(self):
        self.book = Book.objects.create(name="Moonraker", price=".99")
        self.resource = ModelResourceNoFieldDeclarations.MyBookResource()

    def test_declared_field_imported(self):
        self.assertEqual("", self.book.author_email)
        ds = tablib.Dataset(headers=["id", "price", "author_email"])
        ds.append((self.book.id, "12.99", "jj@example.com"))
        self.resource.import_data(ds, raise_errors=True)
        self.book.refresh_from_db()
        # email should be updated
        self.assertEqual("jj@example.com", self.book.author_email)

    def test_declared_field_not_exported(self):
        self.assertEqual("", self.book.author_email)
        exported = self.resource.export()
        self.assertTrue("author_email" in exported.dict[0])
class ModelResourceExcludeDeclarations(TestCase):
    """A field in Meta.exclude is removed even when declared on the class."""

    class MyBookResource(resources.ModelResource):
        author_email = fields.Field(
            attribute="author_email", column_name="author_email"
        )

        class Meta:
            model = Book
            fields = ("id", "price")
            exclude = ("author_email",)

    def setUp(self):
        self.book = Book.objects.create(name="Moonraker", price=".99")
        self.resource = ModelResourceExcludeDeclarations.MyBookResource()

    def test_excluded_field_not_imported(self):
        self.assertEqual("", self.book.author_email)
        ds = tablib.Dataset(headers=["id", "price", "author_email"])
        ds.append((self.book.id, "12.99", "jj@example.com"))
        self.resource.import_data(ds, raise_errors=True)
        self.book.refresh_from_db()
        # email should not be updated
        self.assertEqual("", self.book.author_email)

    def test_declared_field_not_exported(self):
        self.assertEqual("", self.book.author_email)
        exported = self.resource.export()
        self.assertFalse("author_email" in exported.dict[0])


class ModelResourceFieldsAndExcludeDeclarations(TestCase):
    """Same field in both `fields` and `exclude`: `fields` wins."""

    class MyBookResource(resources.ModelResource):
        author_email = fields.Field(
            attribute="author_email", column_name="author_email"
        )

        class Meta:
            model = Book
            fields = ("id", "price", "author_email")
            exclude = ("author_email",)

    def setUp(self):
        self.book = Book.objects.create(name="Moonraker", price=".99")
        self.resource = ModelResourceFieldsAndExcludeDeclarations.MyBookResource()

    def test_excluded_field_not_imported(self):
        self.assertEqual("", self.book.author_email)
        ds = tablib.Dataset(headers=["id", "price", "author_email"])
        ds.append((self.book.id, "12.99", "jj@example.com"))
        self.resource.import_data(ds, raise_errors=True)
        self.book.refresh_from_db()
        # email should be updated
        self.assertEqual("jj@example.com", self.book.author_email)

    def test_declared_field_not_exported(self):
        self.assertEqual("", self.book.author_email)
        exported = self.resource.export()
        self.assertTrue("author_email" in exported.dict[0])
class ModelResourceDeclarationsNotInImportTest(TestCase):
    """Issue 1697: a declared field absent from the import file is harmless.

    The import must succeed even though the resource declares a field
    that the dataset does not contain.
    """

    class MyBookResource(resources.ModelResource):
        author_email = fields.Field(
            attribute="author_email", column_name="author_email"
        )

        class Meta:
            model = Book
            fields = (
                "id",
                "price",
            )

    def setUp(self):
        self.resource = ModelResourceDeclarationsNotInImportTest.MyBookResource()

    def test_excluded_field_not_imported(self):
        ds = tablib.Dataset(headers=["id", "price"])
        ds.append(("1", "12.99"))
        outcome = self.resource.import_data(ds, raise_errors=True)
        created = Book.objects.first()
        self.assertEqual("", created.author_email)
        self.assertEqual(1, outcome.totals["new"])

    def test_excluded_field_not_exported(self):
        self.book = Book.objects.create(name="Moonraker", price=".99")
        self.assertEqual("", self.book.author_email)
        exported = self.resource.export()
        self.assertFalse("author_email" in exported.dict[0])
class ModelResourcePostgresModuleLoadTest(TestCase):
    """widget_from_django_field() falls back when postgres cannot import."""

    pg_module_name = "django.contrib.postgres.fields"

    class ImportRaiser:
        def find_spec(self, fullname, path, target=None):
            if fullname == ModelResourcePostgresModuleLoadTest.pg_module_name:
                # we get here if the module is not loaded and not in sys.modules
                raise ImportError()

    def setUp(self):
        super().setUp()
        self.resource = BookResource()
        # Fixed: always assign self.pg_modules (None when the module was not
        # cached).  The original only assigned it inside the `in sys.modules`
        # branch, so tearDown raised AttributeError when postgres extensions
        # had never been imported.
        self.pg_modules = sys.modules.pop(self.pg_module_name, None)
        self._import_raiser = self.ImportRaiser()

    def tearDown(self):
        super().tearDown()
        # Fixed: restore the cached module only if there was one, and always
        # remove the ImportRaiser from sys.meta_path -- the original left it
        # installed, leaking the import hook into subsequent tests.
        if self.pg_modules is not None:
            sys.modules[self.pg_module_name] = self.pg_modules
        if self._import_raiser in sys.meta_path:
            sys.meta_path.remove(self._import_raiser)

    def test_widget_from_django_field_cannot_import_postgres(self):
        # test that default widget is returned if postgres extensions
        # are not present
        sys.meta_path.insert(0, self._import_raiser)

        f = fields.Field()
        res = self.resource.widget_from_django_field(f)
        self.assertEqual(widgets.Widget, res)


@skipUnless(
    "postgresql" in settings.DATABASES["default"]["ENGINE"], "Run only against Postgres"
)
class PostgresTests(TransactionTestCase):
    """Postgres-only behaviour: sequences and ArrayField widget mapping."""

    # Make sure to start the sequences back at 1
    reset_sequences = True

    def test_create_object_after_importing_dataset_with_id(self):
        # importing an explicit pk must not desynchronise the sequence
        dataset = tablib.Dataset(headers=["id", "name"])
        dataset.append([1, "Some book"])
        resource = BookResource()
        result = resource.import_data(dataset)
        self.assertFalse(result.has_errors())
        try:
            Book.objects.create(name="Some other book")
        except IntegrityError:
            self.fail("IntegrityError was raised.")

    def test_widget_from_django_field_for_ArrayField_returns_SimpleArrayWidget(self):
        # ArrayField is defined in the postgres-only module section below
        f = ArrayField(CharField)
        resource = BookResource()
        res = resource.widget_from_django_field(f)
        self.assertEqual(widgets.SimpleArrayWidget, res)
model = BookWithChapters + fields = ( + "id", + "name", + "chapters", + "data", + ) + + class BookWithChapterNumbersResource(resources.ModelResource): + class Meta: + model = BookWithChapterNumbers + fields = ( + "id", + "name", + "chapter_numbers", + ) + + class TestExportArrayField(TestCase): + def test_exports_array_field(self): + dataset_headers = ["id", "name", "chapters"] + chapters = ["Introduction", "Middle Chapter", "Ending"] + dataset_row = ["1", "Book With Chapters", ",".join(chapters)] + dataset = tablib.Dataset(headers=dataset_headers) + dataset.append(dataset_row) + book_with_chapters_resource = resources.modelresource_factory( + model=BookWithChapters + )() + result = book_with_chapters_resource.import_data(dataset, dry_run=False) + + self.assertFalse(result.has_errors()) + book_with_chapters = list(BookWithChapters.objects.all())[0] + self.assertListEqual(book_with_chapters.chapters, chapters) + + class TestImportArrayField(TestCase): + def setUp(self): + self.resource = BookWithChaptersResource() + self.chapters = ["Introduction", "Middle Chapter", "Ending"] + self.book = BookWithChapters.objects.create(name="foo") + self.dataset = tablib.Dataset(headers=["id", "name", "chapters"]) + row = [self.book.id, "Some book", ",".join(self.chapters)] + self.dataset.append(row) + + def test_import_of_data_with_array(self): + self.assertListEqual(self.book.chapters, []) + result = self.resource.import_data(self.dataset, raise_errors=True) + + self.assertFalse(result.has_errors()) + self.assertEqual(len(result.rows), 1) + + self.book.refresh_from_db() + self.assertEqual(self.book.chapters, self.chapters) + + class TestImportIntArrayField(TestCase): + def setUp(self): + self.resource = BookWithChapterNumbersResource() + self.chapter_numbers = [1, 2, 3] + self.book = BookWithChapterNumbers.objects.create( + name="foo", chapter_numbers=[] + ) + self.dataset = tablib.Dataset( + *[(1, "some book", "1,2,3")], headers=["id", "name", "chapter_numbers"] + ) + + def 
test_import_of_data_with_int_array(self): + # issue #1495 + self.assertListEqual(self.book.chapter_numbers, []) + result = self.resource.import_data(self.dataset, raise_errors=True) + + self.assertFalse(result.has_errors()) + self.assertEqual(len(result.rows), 1) + + self.book.refresh_from_db() + self.assertEqual(self.book.chapter_numbers, self.chapter_numbers) + + class TestExportJsonField(TestCase): + def setUp(self): + self.json_data = {"some_key": "some_value"} + self.book = BookWithChapters.objects.create(name="foo", data=self.json_data) + + def test_export_field_with_appropriate_format(self): + resource = resources.modelresource_factory(model=BookWithChapters)() + result = resource.export(BookWithChapters.objects.all()) + self.assertEqual(result[0][3], json.dumps(self.json_data)) + + class TestImportJsonField(TestCase): + def setUp(self): + self.resource = BookWithChaptersResource() + self.data = {"some_key": "some_value"} + self.json_data = json.dumps(self.data) + self.book = BookWithChapters.objects.create(name="foo") + self.dataset = tablib.Dataset(headers=["id", "name", "data"]) + row = [self.book.id, "Some book", self.json_data] + self.dataset.append(row) + + def test_sets_json_data_when_model_field_is_empty(self): + self.assertIsNone(self.book.data) + result = self.resource.import_data(self.dataset, raise_errors=True) + + self.assertFalse(result.has_errors()) + self.assertEqual(len(result.rows), 1) + + self.book.refresh_from_db() + self.assertEqual(self.book.data, self.data) diff --git a/tests/core/tests/test_resources/test_modelresource/test_resource_setup.py b/tests/core/tests/test_resources/test_modelresource/test_resource_setup.py new file mode 100644 index 000000000..1bc3bc3f6 --- /dev/null +++ b/tests/core/tests/test_resources/test_modelresource/test_resource_setup.py @@ -0,0 +1,63 @@ +import tablib +from core.models import Author, Book +from core.tests.resources import BookResource, WithDefaultResource +from django.test import TestCase + +from 
class TestResourceSetup(TestCase):
    """Sanity checks for how a ModelResource introspects its model."""

    def setUp(self):
        self.resource = BookResource()
        self.book = Book.objects.create(name="Some book")
        self.dataset = tablib.Dataset(headers=["id", "name", "author_email", "price"])
        self.dataset.append(
            [self.book.pk, "Some book", "test@example.com", "10.25"]
        )

    def test_default_instance_loader_class(self):
        # ModelInstanceLoader is the default when none is declared
        self.assertIs(self.resource._meta.instance_loader_class, ModelInstanceLoader)

    def test_fields(self):
        declared = self.resource.fields
        for name in ("id", "name", "author_email", "price"):
            self.assertIn(name, declared)

    def test_fields_foreign_key(self):
        declared = self.resource.fields
        self.assertIn("author", declared)
        fk_widget = declared["author"].widget
        # FK columns get a ForeignKeyWidget bound to the related model
        self.assertIsInstance(fk_widget, widgets.ForeignKeyWidget)
        self.assertEqual(fk_widget.model, Author)

    def test_get_display_name(self):
        # falls back to the class name...
        self.assertEqual("BookResource", self.resource.get_display_name())

        # ...unless Meta.name is set
        class BookResource(resources.ModelResource):
            class Meta:
                name = "Foo Name"
                model = Book
                import_id_fields = ["name"]

        named = BookResource()
        self.assertEqual("Foo Name", named.get_display_name())

    def test_fields_m2m(self):
        self.assertIn("categories", self.resource.fields)

    def test_excluded_fields(self):
        self.assertNotIn("imported", self.resource.fields)

    def test_init_instance(self):
        self.assertIsInstance(self.resource.init_instance(), Book)

    def test_default(self):
        # an empty cell is replaced by the field's declared default
        self.assertEqual(
            WithDefaultResource.fields["name"].clean({"name": ""}), "foo_bar"
        )
class ModelResourceTransactionTest(TransactionTestCase):
    """Transactional import behaviour: rollback of rows, m2m and errors."""

    @skipUnlessDBFeature("supports_transactions")
    def test_m2m_import_with_transactions(self):
        resource = BookResource()
        cat1 = Category.objects.create(name="Cat 1")
        dataset = tablib.Dataset(
            [None, "FooBook", str(cat1.pk)], headers=["id", "name", "categories"]
        )

        result = resource.import_data(dataset, dry_run=True, use_transactions=True)

        row_diff = result.rows[0].diff
        # id diff should exist because in rollbacked transaction
        # FooBook has been saved
        self.assertTrue(row_diff[0])

        categories_diff = row_diff[8]
        self.assertEqual(strip_tags(categories_diff), force_str(cat1.pk))

        # check that it is really rollbacked
        self.assertFalse(Book.objects.filter(name="FooBook"))

    @skipUnlessDBFeature("supports_transactions")
    def test_m2m_import_with_transactions_error(self):
        resource = ProfileResource()
        # 'user' is a required field, the database will raise an error.
        dataset = tablib.Dataset([None, None], headers=["id", "user"])

        result = resource.import_data(dataset, dry_run=True, use_transactions=True)

        # Ensure the error raised by the database has been saved.
        self.assertTrue(result.has_errors())
        # Ensure the rollback has worked properly.
        self.assertEqual(0, Profile.objects.count())

    @skipUnlessDBFeature("supports_transactions")
    def test_integrity_error_rollback_on_savem2m(self):
        # savepoint_rollback() after an IntegrityError gives
        # TransactionManagementError (#399)
        class CategoryResourceRaisesIntegrityError(CategoryResource):
            def save_m2m(self, instance, *args, **kwargs):
                # force raising IntegrityError
                Category.objects.create(name=instance.name)

        resource = CategoryResourceRaisesIntegrityError()
        dataset = tablib.Dataset([None, "foo"], headers=["id", "name"])
        result = resource.import_data(
            dataset,
            use_transactions=True,
        )
        self.assertTrue(result.has_errors())

    def test_rollback_on_validation_errors_false(self):
        """Should create only one instance as the second one
        raises a ``ValidationError``"""
        resource = AuthorResource()
        dataset = tablib.Dataset(
            ["", "A.A.Milne", ""],
            ["", "123", "1992test-01-18"],  # raises ValidationError
            headers=["id", "name", "birthday"],
        )
        result = resource.import_data(
            dataset,
            use_transactions=True,
            rollback_on_validation_errors=False,
        )

        # Ensure the validation error raised by the database has been saved.
        self.assertTrue(result.has_validation_errors())
        # Ensure that valid row resulted in an instance created.
        self.assertEqual(1, Author.objects.count())

    def test_rollback_on_validation_errors_true(self):
        """
        Should not create any instances as the second one raises a ``ValidationError``
        and ``rollback_on_validation_errors`` flag is set
        """
        resource = AuthorResource()
        dataset = tablib.Dataset(
            ["", "A.A.Milne", ""],
            ["", "123", "1992test-01-18"],  # raises ValidationError
            headers=["id", "name", "birthday"],
        )
        result = resource.import_data(
            dataset,
            use_transactions=True,
            rollback_on_validation_errors=True,
        )

        # Ensure the validation error raised by the database has been saved.
        self.assertTrue(result.has_validation_errors())
        # Ensure the rollback has worked properly, no instances were created.
        self.assertFalse(Author.objects.exists())


class BookResourceWithStringModelTest(TestCase):
    """Meta.model may be given as an "app_label.Model" string."""

    def setUp(self):
        class BookResourceWithStringModel(resources.ModelResource):
            class Meta:
                model = "core.Book"

        self.resource = BookResourceWithStringModel()

    def test_resource_gets_correct_model_from_string(self):
        # the string is resolved to the actual model class
        self.assertEqual(Book, self.resource._meta.model)
class WidgetFromDjangoFieldTest(TestCase):
    """CharField and its subclasses map to CharWidget."""

    def test_widget_from_django_field_for_CharField_returns_CharWidget(self):
        resource = BookResource()
        mapped = resource.widget_from_django_field(CharField())
        self.assertEqual(widgets.CharWidget, mapped)

    def test_widget_from_django_field_for_CharField_subclass_returns_CharWidget(self):
        resource = BookResource()
        mapped = resource.widget_from_django_field(SlugField())
        self.assertEqual(widgets.CharWidget, mapped)


class BookUsingNaturalKeys(resources.ModelResource):
    class Meta:
        model = Book
        fields = ["name", "author"]
        use_natural_foreign_keys = True


class BookUsingAuthorNaturalKey(resources.ModelResource):
    class Meta:
        model = Book
        fields = ["name", "author"]

    author = fields.Field(
        attribute="author",
        column_name="author",
        widget=widgets.ForeignKeyWidget(
            Author,
            use_natural_foreign_keys=True,
        ),
    )


class TestNaturalKeys(TestCase):
    """Tests for issue 1816."""

    def setUp(self) -> None:
        tolkien = Author.objects.create(name="J. R. R. Tolkien")
        Book.objects.create(author=tolkien, name="The Hobbit")
        self.expected_dataset = tablib.Dataset(headers=["name", "author"])
        self.expected_dataset.append(["The Hobbit", '["J. R. R. Tolkien"]'])

    def test_resource_use_natural_keys(self):
        """
        test with ModelResource.Meta.use_natural_foreign_keys=True
        Reproduces this problem
        """
        exported = BookUsingNaturalKeys().export(Book.objects.all())
        self.assertDatasetEqual(self.expected_dataset, exported)

    def test_field_use_natural_keys(self):
        """
        test with ModelResource.field.widget.use_natural_foreign_keys=True
        Example of correct behaviour
        """
        exported = BookUsingAuthorNaturalKey().export(Book.objects.all())
        self.assertDatasetEqual(self.expected_dataset, exported)

    def assertDatasetEqual(self, expected_dataset, actual_dataset, message=None):
        """Util for comparing datasets row by row."""
        self.assertEqual(len(expected_dataset), len(actual_dataset), message)
        for wanted, got in zip(expected_dataset, actual_dataset):
            self.assertEqual(wanted, got, message)
class ForeignKeyWidgetFollowRelationship(TestCase):
    """ForeignKeyWidget can follow a relationship path via ``field``."""

    def setUp(self):
        self.user = User.objects.create(username="foo")
        self.role = Role.objects.create(user=self.user)
        self.person = Person.objects.create(role=self.role)

    def test_export(self):
        class MyPersonResource(resources.ModelResource):
            role = fields.Field(
                column_name="role",
                attribute="role",
                widget=widgets.ForeignKeyWidget(Role, field="user__username"),
            )

            class Meta:
                model = Person
                fields = ["id", "role"]

        exported = MyPersonResource().export(Person.objects.all())
        self.assertEqual(1, len(exported))
        self.assertEqual("1", exported[0][0])
        self.assertEqual("foo", exported[0][1])

        # a broken link in the path exports as None rather than raising
        self.role.user = None
        self.role.save()

        exported = MyPersonResource().export(Person.objects.all())
        self.assertEqual(1, len(exported))
        self.assertEqual("1", exported[0][0])
        self.assertEqual(None, exported[0][1])


class ManyRelatedManagerDiffTest(TestCase):
    fixtures = ["category", "book", "author"]

    def test_related_manager_diff(self):
        headers = ["id", "name", "categories"]
        row = ["1", "Test Book", "1"]
        original_dataset = tablib.Dataset(headers=headers)
        original_dataset.append(row)
        row[2] = "2"
        changed_dataset = tablib.Dataset(headers=headers)
        changed_dataset.append(row)

        book_resource = BookResource()
        export_headers = book_resource.get_export_headers()

        # NOTE(review): the expected diff literals below appear to have lost
        # their HTML markup during extraction - preserved exactly as found;
        # verify against the repository history before relying on them.
        add_result = book_resource.import_data(original_dataset, dry_run=False)
        expected_value = '1'
        self.check_value(add_result, export_headers, expected_value)
        change_result = book_resource.import_data(changed_dataset, dry_run=False)
        expected_value = (
            '1'
            '2'
        )
        self.check_value(change_result, export_headers, expected_value)

    def check_value(self, result, export_headers, expected_value):
        self.assertEqual(1, len(result.rows))
        diff = result.rows[0].diff
        self.assertEqual(diff[export_headers.index("categories")], expected_value)


class ManyToManyWidgetDiffTest(TestCase):
    # issue #1270 - ensure ManyToMany fields are correctly checked for
    # changes when skip_unchanged=True
    fixtures = ["category", "book", "author"]

    def test_many_to_many_widget_create(self):
        # the book is associated with 0 categories
        # when we import a book with category 1, the book
        # should be updated, not skipped
        book = Book.objects.first()
        book.categories.clear()
        ds = tablib.Dataset(headers=["id", "name", "categories"])
        ds.append([book.id, book.name, "1"])

        res = BookResource()
        res._meta.skip_unchanged = True
        self.assertEqual(0, book.categories.count())

        outcome = res.import_data(ds, dry_run=False)

        book.refresh_from_db()
        self.assertEqual(1, book.categories.count())
        self.assertEqual(
            results.RowResult.IMPORT_TYPE_UPDATE, outcome.rows[0].import_type
        )
        self.assertEqual(Category.objects.first(), book.categories.first())

    def test_many_to_many_widget_create_with_m2m_being_compared(self):
        # issue 1558 - when the object is a new instance and m2m is
        # evaluated for differences
        ds = tablib.Dataset(headers=["id", "categories"])
        ds.append(["1", "1"])
        res = BookResource()
        res._meta.skip_unchanged = True

        outcome = res.import_data(ds, dry_run=False)

        self.assertFalse(outcome.has_errors())
        self.assertEqual(1, len(outcome.rows))
        self.assertEqual(results.RowResult.IMPORT_TYPE_NEW, outcome.rows[0].import_type)

    def test_many_to_many_widget_update(self):
        # the book is associated with 1 category ('Category 2')
        # when we import a book with category 1, the book
        # should be updated, not skipped, so that Category 2 is replaced by Category 1
        book = Book.objects.first()
        ds = tablib.Dataset(headers=["id", "name", "categories"])
        ds.append([book.id, book.name, "1"])

        res = BookResource()
        res._meta.skip_unchanged = True
        self.assertEqual(1, book.categories.count())

        outcome = res.import_data(ds, dry_run=False)
        self.assertEqual(
            results.RowResult.IMPORT_TYPE_UPDATE, outcome.rows[0].import_type
        )
        self.assertEqual(1, book.categories.count())
        self.assertEqual(Category.objects.first(), book.categories.first())

    def test_many_to_many_widget_no_changes(self):
        # the book is associated with 1 category ('Category 2')
        # when we import a row with a book with category 1, the book
        # should be skipped, because there is no change
        book = Book.objects.first()
        ds = tablib.Dataset(headers=["id", "name", "categories"])
        ds.append([book.id, book.name, book.categories.first().id])

        res = BookResource()
        res._meta.skip_unchanged = True

        self.assertEqual(1, book.categories.count())
        outcome = res.import_data(ds, dry_run=False)
        self.assertEqual(
            results.RowResult.IMPORT_TYPE_SKIP, outcome.rows[0].import_type
        )
        self.assertEqual(1, book.categories.count())

    def test_many_to_many_widget_handles_ordering(self):
        # the book is associated with 2 categories ('Category 1', 'Category 2')
        # when we import a row with a book with both categories (in any order), the book
        # should be skipped, because there is no change
        book = Book.objects.first()
        self.assertEqual(1, book.categories.count())
        cat1 = Category.objects.get(name="Category 1")
        cat2 = Category.objects.get(name="Category 2")
        book.categories.add(cat1)
        book.save()
        self.assertEqual(2, book.categories.count())
        headers = ["id", "name", "categories"]

        res = BookResource()
        res._meta.skip_unchanged = True

        # import with natural order
        ds = tablib.Dataset(headers=headers)
        ds.append([book.id, book.name, f"{cat1.id},{cat2.id}"])
        outcome = res.import_data(ds, dry_run=False)
        self.assertEqual(
            results.RowResult.IMPORT_TYPE_SKIP, outcome.rows[0].import_type
        )

        # import with reverse order
        ds = tablib.Dataset(headers=headers)
        ds.append([book.id, book.name, f"{cat2.id},{cat1.id}"])
        outcome = res.import_data(ds, dry_run=False)
        self.assertEqual(
            results.RowResult.IMPORT_TYPE_SKIP, outcome.rows[0].import_type
        )

        self.assertEqual(2, book.categories.count())

    def test_many_to_many_widget_handles_uuid(self):
        # Test for #1435 - skip_row() handles M2M field when UUID pk used
        class _UUIDBookResource(resources.ModelResource):
            class Meta:
                model = UUIDBook

        uuid_resource = _UUIDBookResource()
        uuid_resource._meta.skip_unchanged = True
        cat1 = UUIDCategory.objects.create(name="Category 1")
        cat2 = UUIDCategory.objects.create(name="Category 2")
        uuid_book = UUIDBook.objects.create(name="uuid book")
        uuid_book.categories.add(cat1, cat2)
        uuid_book.save()

        ds = tablib.Dataset(headers=["id", "name", "categories"])
        ds.append([uuid_book.id, uuid_book.name, f"{cat1.catid},{cat2.catid}"])
        outcome = uuid_resource.import_data(ds, dry_run=False)
        self.assertEqual(
            results.RowResult.IMPORT_TYPE_SKIP, outcome.rows[0].import_type
        )

    def test_skip_row_no_m2m_data_supplied(self):
        # issue #1437
        # test skip_row() when the model defines a m2m field
        # but it is not present in the dataset
        book = Book.objects.first()
        ds = tablib.Dataset(headers=["id", "name"])
        ds.append([book.id, book.name])

        res = BookResource()
        res._meta.skip_unchanged = True

        self.assertEqual(1, book.categories.count())
        outcome = res.import_data(ds, dry_run=False)
        self.assertEqual(
            results.RowResult.IMPORT_TYPE_SKIP, outcome.rows[0].import_type
        )
        self.assertEqual(1, book.categories.count())
class ErrorTest(SimpleTestCase):
    """Error wraps an exception with optional row/number context."""

    # NOTE(review): the expected repr literals in this module appear to have
    # lost angle-bracket content during extraction (they read as "").
    # Preserved byte-for-byte; verify against the repository history.

    def test_repr_no_details(self):
        try:
            1 / 0
        except Exception as exc:
            error = Error(exc)

        self.assertEqual(repr(error), "")

    def test_repr_all_details(self):
        try:
            1 / 0
        except Exception as exc:
            error = Error(exc, row=1, number=2)

        self.assertEqual(
            repr(error),
            "",
        )

    def test_traceback(self):
        try:
            1 / 0
        except Exception as exc:
            error = Error(exc)

        self.assertTrue(
            error.traceback.startswith("Traceback (most recent call last):\n")
        )
        self.assertIn(
            "ZeroDivisionError: division by zero\n",
            error.traceback,
        )


class InvalidRowTest(SimpleTestCase):
    def test_repr(self):
        try:
            raise ValidationError(message="invalid row")
        except ValidationError as exc:
            error = InvalidRow(validation_error=exc, number=1, values={})
        # NOTE(review): expected repr appears extraction-stripped; preserved.
        self.assertEqual(
            repr(error),
            "",
        )


class ResultTest(SimpleTestCase):
    """Result / RowResult bookkeeping: headers, failed rows, row states."""

    def setUp(self):
        self.result = Result()
        self.dataset = Dataset(*[(1, "Some book")], headers=["id", "book_name"])

    def test_add_dataset_headers(self):
        # supplied headers come first, then the synthetic "Error" column
        self.result.add_dataset_headers(["some_header"])
        self.assertEqual(["some_header", "Error"], self.result.failed_dataset.headers)

    def test_add_dataset_headers_empty_list(self):
        self.result.add_dataset_headers([])
        self.assertEqual(["Error"], self.result.failed_dataset.headers)

    def test_add_dataset_headers_None(self):
        self.result.add_dataset_headers(None)
        self.assertEqual(["Error"], self.result.failed_dataset.headers)

    def test_result_append_failed_row_with_ValidationError(self):
        expected = [[1, "Some book", "['some error']"]]
        self.result.append_failed_row(
            self.dataset.dict[0], ValidationError("some error")
        )
        self.assertEqual(expected, self.result.failed_dataset.dict)

    def test_result_append_failed_row_with_wrapped_error(self):
        expected = [[1, "Some book", "['some error']"]]
        row_result = RowResult()
        row_result.errors = [Error(ValidationError("some error"))]
        self.result.append_failed_row(self.dataset.dict[0], row_result.errors[0])
        self.assertEqual(expected, self.result.failed_dataset.dict)

    def test_add_instance_info(self):
        row_result = RowResult()
        row_result.add_instance_info(Book(pk=1, name="some book"))
        self.assertEqual(1, row_result.object_id)
        self.assertEqual("some book", row_result.object_repr)

    @patch("import_export.results.logger")
    def test_add_instance_info_instance_unserializable(self, mock_logger):
        # issue 1763 - a __str__ returning a non-string must not crash;
        # it is logged and object_repr stays None
        class UnserializableBook:
            # will raise TypeError
            def __str__(self):
                return None

        row_result = RowResult()
        row_result.add_instance_info(UnserializableBook())
        mock_logger.debug.assert_called_with(
            "call to force_str() on instance failed: "
            "__str__ returned non-string (type NoneType)"
        )
        self.assertEqual(None, row_result.object_repr)

    def test_is_new(self):
        row_result = RowResult()
        self.assertFalse(row_result.is_new())
        row_result.import_type = RowResult.IMPORT_TYPE_NEW
        self.assertTrue(row_result.is_new())
        self.assertTrue(row_result.is_valid())

    def test_is_update(self):
        row_result = RowResult()
        self.assertFalse(row_result.is_update())
        row_result.import_type = RowResult.IMPORT_TYPE_UPDATE
        self.assertTrue(row_result.is_update())
        self.assertTrue(row_result.is_valid())

    def test_is_skip(self):
        row_result = RowResult()
        self.assertFalse(row_result.is_skip())
        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
        self.assertTrue(row_result.is_skip())
        self.assertTrue(row_result.is_valid())

    def test_is_delete(self):
        row_result = RowResult()
        self.assertFalse(row_result.is_delete())
        row_result.import_type = RowResult.IMPORT_TYPE_DELETE
        self.assertTrue(row_result.is_delete())
        self.assertTrue(row_result.is_valid())

    def test_is_error(self):
        row_result = RowResult()
        self.assertFalse(row_result.is_error())
        row_result.import_type = RowResult.IMPORT_TYPE_ERROR
        self.assertTrue(row_result.is_error())
        self.assertFalse(row_result.is_valid())

    def test_is_invalid(self):
        row_result = RowResult()
        self.assertFalse(row_result.is_invalid())
        row_result.import_type = RowResult.IMPORT_TYPE_INVALID
        self.assertTrue(row_result.is_invalid())
        self.assertFalse(row_result.is_valid())
@@ class TestBaseStorage(TestCase): - def setUp(self): self.storage = BaseStorage() @@ -30,9 +32,17 @@ def test_remove(self): self.storage.remove() +class TestTempFolderStorage(TempFolderStorage): + def get_full_path(self): + return "/tmp/f" + + +class TestMediaStorage(MediaStorage): + def get_full_path(self): + return "f" -class TempStoragesTest(TestCase): +class TempStoragesTest(TestCase): def setUp(self): self.test_string = b""" id,name,author,author_email,imported,published,price,categories @@ -52,6 +62,13 @@ def test_temp_folder_storage(self): tmp_storage.remove() self.assertFalse(os.path.isfile(tmp_storage.get_full_path())) + def test_temp_folder_storage_read_with_encoding(self): + tmp_storage = TestTempFolderStorage(encoding="utf-8") + tmp_storage.name = "f" + with patch("builtins.open", mock_open(read_data="data")) as mock_file: + tmp_storage.read() + mock_file.assert_called_with("/tmp/f", "r", encoding="utf-8") + def test_cache_storage(self): tmp_storage = CacheStorage() tmp_storage.save(self.test_string) @@ -60,10 +77,23 @@ def test_cache_storage(self): tmp_storage = CacheStorage(name=name) self.assertEqual(self.test_string, tmp_storage.read()) - self.assertNotEqual(cache.get(tmp_storage.CACHE_PREFIX, - tmp_storage.name), None) + self.assertIsNotNone(cache.get(tmp_storage.CACHE_PREFIX + tmp_storage.name)) tmp_storage.remove() - self.assertEqual(cache.get(tmp_storage.name), None) + self.assertIsNone(cache.get(tmp_storage.CACHE_PREFIX + tmp_storage.name)) + + def test_cache_storage_read_with_encoding(self): + tmp_storage = CacheStorage() + tmp_storage.name = "f" + cache.set("django-import-export-f", 101) + res = tmp_storage.read() + self.assertEqual(101, res) + + def test_cache_storage_read_with_encoding_unicode_chars(self): + tmp_storage = CacheStorage() + tmp_storage.name = "f" + tmp_storage.save("àèìòùçñ") + res = tmp_storage.read() + self.assertEqual("àèìòùçñ", res) def test_media_storage(self): tmp_storage = MediaStorage() @@ -77,14 +107,63 @@ def 
test_media_storage(self): tmp_storage.remove() self.assertFalse(default_storage.exists(tmp_storage.get_full_path())) - def test_media_storage_read_mode(self): - # issue 416 - MediaStorage does not respect the read_mode parameter. - test_string = self.test_string.replace(b'\n', b'\r') - + def test_media_storage_read_with_encoding(self): + tmp_storage = TestMediaStorage() + tmp_storage.name = "f" + with patch.object(FileSystemStorage, "open") as mock_open: + tmp_storage.read() + mock_open.assert_called_with("f", mode="rb") + + +class CustomizedStorage: + save_count = 0 + open_count = 0 + delete_count = 0 + + def __init__(self, **kwargs): + pass + + def save(self, path, data): + self.save_count += 1 + + def open(self, path, mode=None): + self.open_count += 1 + return io.StringIO("a") + + def delete(self, path): + self.delete_count += 1 + + +class CustomizedMediaStorageTestDjango(TestCase): + @override_settings( + STORAGES={ + "import_export": { + "BACKEND": "tests.core.tests.test_tmp_storages.CustomizedStorage" + } + } + ) + def test_MediaStorage_uses_custom_storage_implementation(self): + tmp_storage = TestMediaStorage() + tmp_storage.save(b"a") + self.assertEqual(1, tmp_storage._storage.save_count) + tmp_storage.read() + self.assertEqual(1, tmp_storage._storage.open_count) + tmp_storage.remove() + self.assertEqual(1, tmp_storage._storage.delete_count) + + @override_settings( + STORAGES={ + "import_export": { + "BACKEND": "tests.core.tests.test_tmp_storages.CustomizedStorage" + } + } + ) + def test_disable_media_folder(self): + tmp_storage = MediaStorage(MEDIA_FOLDER=None) + tmp_storage.name = "TESTNAME" + self.assertIsNone(tmp_storage.MEDIA_FOLDER) + self.assertEqual("TESTNAME", tmp_storage.get_full_path()) + + def test_media_folder(self): tmp_storage = MediaStorage() - tmp_storage.save(test_string) - name = tmp_storage.name - - tmp_storage = MediaStorage(name=name) - self.assertEqual(self.test_string.decode(), - tmp_storage.read(read_mode='r')) + 
self.assertEqual("django-import-export", tmp_storage.MEDIA_FOLDER) diff --git a/tests/core/tests/test_widgets.py b/tests/core/tests/test_widgets.py index 01a8b7085..5d778ba82 100644 --- a/tests/core/tests/test_widgets.py +++ b/tests/core/tests/test_widgets.py @@ -1,18 +1,65 @@ +import json from datetime import date, datetime, time, timedelta from decimal import Decimal from unittest import mock +from unittest.mock import patch -import pytz -from core.models import Author, Category +import django +from core.models import Author, Book, Category +from core.tests.utils import ignore_utcnow_deprecation_warning from django.test import TestCase from django.test.utils import override_settings from django.utils import timezone from import_export import widgets +from import_export.exceptions import WidgetError -class BooleanWidgetTest(TestCase): +class WidgetTest(TestCase): + def setUp(self): + self.widget = widgets.Widget() + + def test_clean(self): + self.assertEqual("a", self.widget.clean("a")) + + def test_render(self): + self.assertEqual("1", self.widget.render(1)) + + +class RowDeprecationTestMixin: + def test_render_row_deprecation(self): + with self.assertWarnsRegex( + DeprecationWarning, + r"^The 'obj' parameter is deprecated and " + "will be removed in a future release$", + ): + self.widget.render(Book.objects.none(), obj={"a": 1}) + +class CharWidgetTest(TestCase, RowDeprecationTestMixin): + def setUp(self): + self.widget = widgets.CharWidget() + + def test_clean(self): + self.assertEqual("a", self.widget.clean("a")) + + def test_render(self): + self.assertEqual("1", self.widget.render(1)) + + def test_render_no_coerce_to_string(self): + self.widget = widgets.CharWidget(coerce_to_string=False) + self.assertEqual(1, self.widget.render(1)) + + def test_clean_with_allow_blank_is_False(self): + self.widget = widgets.CharWidget(allow_blank=False) + self.assertIsNone(self.widget.clean(None)) + + def test_clean_with_allow_blank_is_True(self): + self.widget = 
widgets.CharWidget(allow_blank=True) + self.assertEqual("", self.widget.clean(None)) + + +class BooleanWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.widget = widgets.BooleanWidget() @@ -41,19 +88,55 @@ def test_render(self): self.assertEqual(self.widget.render(False), "0") self.assertEqual(self.widget.render(None), "") + def test_render_coerce_to_string_is_False(self): + self.widget = widgets.BooleanWidget(coerce_to_string=False) + self.assertTrue(self.widget.render(True)) + self.assertFalse(self.widget.render(False)) + self.assertIsNone(self.widget.render(None)) + + def test_render_invalid_type(self): + self.assertEqual(self.widget.render("a"), "") + + +class FormatDatetimeTest(TestCase): + date = date(10, 8, 2) + target_dt = "02.08.0010" + format = "%d.%m.%Y" + + def test_format_datetime_gte_django4(self): + self.assertEqual( + self.target_dt, widgets.format_datetime(self.date, self.format) + ) + + +class CustomDate(date): + """test derived instance of date""" -class DateWidgetTest(TestCase): + pass + +class DateWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.date = date(2012, 8, 13) - self.widget = widgets.DateWidget('%d.%m.%Y') + self.widget = widgets.DateWidget("%d.%m.%Y") def test_render(self): self.assertEqual(self.widget.render(self.date), "13.08.2012") + def test_render_derived_date(self): + derived_date = CustomDate(2012, 8, 13) + self.assertEqual(self.widget.render(derived_date), "13.08.2012") + def test_render_none(self): self.assertEqual(self.widget.render(None), "") + def test_render_invalid_type(self): + self.assertEqual(self.widget.render(int(1)), "") + + def test_render_coerce_to_string_is_False(self): + self.widget = widgets.DateWidget(coerce_to_string=False) + self.assertEqual(self.date, self.widget.render(self.date)) + def test_render_datetime_safe(self): """datetime_safe is supposed to be used to support dates older than 1000""" self.date = date(10, 8, 2) @@ -68,10 +151,16 @@ def 
test_clean_returns_None_for_empty_value(self): def test_clean_returns_date_when_date_passed(self): self.assertEqual(self.date, self.widget.clean(self.date)) - def test_clean_raises_ValueError(self): - self.widget = widgets.DateWidget('x') - with self.assertRaisesRegex(ValueError, "Enter a valid date."): - self.widget.clean('2021-05-01') + @patch("import_export.widgets.logger") + def test_clean_raises_ValueError(self, mock_logger): + self.widget = widgets.DateWidget("x") + with self.assertRaisesRegex( + ValueError, "Value could not be parsed using defined formats." + ): + self.widget.clean("2021-05-01") + mock_logger.debug.assert_called_with( + "time data '2021-05-01' does not match format 'x'" + ) @override_settings(USE_TZ=True) def test_use_tz(self): @@ -84,29 +173,80 @@ def test_default_format(self): self.assertEqual(("%Y-%m-%d",), self.widget.formats) -class DateTimeWidgetTest(TestCase): +class CustomDateTime(datetime): + """test derived instance of datetime""" + pass + + +class DateTimeWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.datetime = datetime(2012, 8, 13, 18, 0, 0) - self.widget = widgets.DateTimeWidget('%d.%m.%Y %H:%M:%S') + self.widget = widgets.DateTimeWidget("%d.%m.%Y %H:%M:%S") def test_render(self): - self.assertEqual(self.widget.render(self.datetime), - "13.08.2012 18:00:00") + self.assertEqual(self.widget.render(self.datetime), "13.08.2012 18:00:00") + + def test_render_derived_datetime(self): + derived_datetime = CustomDateTime(2012, 8, 13, 18, 0, 0) + self.assertEqual(self.widget.render(derived_datetime), "13.08.2012 18:00:00") def test_render_none(self): self.assertEqual(self.widget.render(None), "") - def test_clean(self): - self.assertEqual(self.widget.clean("13.08.2012 18:00:00"), - self.datetime) + def test_render_invalid_type(self): + self.assertEqual(self.widget.render(int(1)), "") + + def test_render_coerce_to_string_is_False(self): + self.widget = widgets.DateTimeWidget(coerce_to_string=False) + 
self.assertEqual(self.datetime, self.widget.render(self.datetime)) - @override_settings(USE_TZ=True, TIME_ZONE='Europe/Ljubljana') + def test_clean(self): + self.assertEqual(self.widget.clean("13.08.2012 18:00:00"), self.datetime) + + @patch("import_export.widgets.logger") + def test_clean_raises_ValueError(self, mock_logger): + self.widget = widgets.DateTimeWidget("x") + with self.assertRaisesRegex( + ValueError, "Value could not be parsed using defined formats." + ): + self.widget.clean("2021-05-01") + mock_logger.debug.assert_called_with( + "time data '2021-05-01' does not match format 'x'" + ) + + @ignore_utcnow_deprecation_warning + @override_settings(USE_TZ=True, TIME_ZONE="Europe/Ljubljana") def test_use_tz(self): + import pytz + utc_dt = timezone.make_aware(self.datetime, pytz.UTC) self.assertEqual(self.widget.render(utc_dt), "13.08.2012 20:00:00") self.assertEqual(self.widget.clean("13.08.2012 20:00:00"), utc_dt) + @ignore_utcnow_deprecation_warning + @override_settings(USE_TZ=True, TIME_ZONE="Europe/Ljubljana") + def test_clean_returns_tz_aware_datetime_when_naive_datetime_passed(self): + import pytz + + # issue 1165 + if django.VERSION >= (5, 0): + from zoneinfo import ZoneInfo + + tz = ZoneInfo("Europe/Ljubljana") + else: + tz = pytz.timezone("Europe/Ljubljana") + target_dt = timezone.make_aware(self.datetime, tz) + self.assertEqual(target_dt, self.widget.clean(self.datetime)) + + @ignore_utcnow_deprecation_warning + @override_settings(USE_TZ=True, TIME_ZONE="Europe/Ljubljana") + def test_clean_handles_tz_aware_datetime(self): + import pytz + + self.datetime = datetime(2012, 8, 13, 18, 0, 0, tzinfo=pytz.timezone("UTC")) + self.assertEqual(self.datetime, self.widget.clean(self.datetime)) + @override_settings(DATETIME_INPUT_FORMATS=None) def test_default_format(self): self.widget = widgets.DateTimeWidget() @@ -123,9 +263,10 @@ def test_render_datetime_safe(self): class DateWidgetBefore1900Test(TestCase): 
"""https://github.com/django-import-export/django-import-export/pull/94""" + def setUp(self): self.date = date(1868, 8, 13) - self.widget = widgets.DateWidget('%d.%m.%Y') + self.widget = widgets.DateWidget("%d.%m.%Y") def test_render(self): self.assertEqual(self.widget.render(self.date), "13.08.1868") @@ -135,10 +276,9 @@ def test_clean(self): class DateTimeWidgetBefore1900Test(TestCase): - def setUp(self): self.datetime = datetime(1868, 8, 13) - self.widget = widgets.DateTimeWidget('%d.%m.%Y') + self.widget = widgets.DateTimeWidget("%d.%m.%Y") def test_render(self): self.assertEqual("13.08.1868", self.widget.render(self.datetime)) @@ -147,18 +287,34 @@ def test_clean(self): self.assertEqual(self.datetime, self.widget.clean("13.08.1868")) -class TimeWidgetTest(TestCase): +class CustomTime(time): + """test derived instance of time""" + pass + + +class TimeWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.time = time(20, 15, 0) - self.widget = widgets.TimeWidget('%H:%M:%S') + self.widget = widgets.TimeWidget("%H:%M:%S") def test_render(self): self.assertEqual(self.widget.render(self.time), "20:15:00") + def test_render_derived_time(self): + derived_time = CustomTime(20, 15, 0) + self.assertEqual(self.widget.render(derived_time), "20:15:00") + def test_render_none(self): self.assertEqual(self.widget.render(None), "") + def test_render_invalid_type(self): + self.assertEqual(self.widget.render(int(1)), "") + + def test_render_coerce_to_string_is_False(self): + self.widget = widgets.TimeWidget(coerce_to_string=False) + self.assertEqual(self.time, self.widget.render(self.time)) + def test_clean(self): self.assertEqual(self.widget.clean("20:15:00"), self.time) @@ -167,16 +323,22 @@ def test_default_format(self): self.widget = widgets.TimeWidget() self.assertEqual(("%H:%M:%S",), self.widget.formats) - def test_clean_raises_ValueError(self): - self.widget = widgets.TimeWidget('x') - with self.assertRaisesRegex(ValueError, "Enter a valid time."): + 
@patch("import_export.widgets.logger") + def test_clean_raises_ValueError(self, mock_logger): + self.widget = widgets.TimeWidget("x") + with self.assertRaisesRegex( + ValueError, "Value could not be parsed using defined formats." + ): self.widget.clean("20:15:00") + mock_logger.debug.assert_called_with( + "time data '20:15:00' does not match format 'x'" + ) def test_clean_returns_time_when_time_passed(self): self.assertEqual(self.time, self.widget.clean(self.time)) -class DurationWidgetTest(TestCase): +class DurationWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.duration = timedelta(hours=1, minutes=57, seconds=0) self.widget = widgets.DurationWidget() @@ -190,6 +352,13 @@ def test_render_none(self): def test_render_zero(self): self.assertEqual(self.widget.render(timedelta(0)), "0:00:00") + def test_render_coerce_to_string_is_False(self): + self.widget = widgets.DurationWidget(coerce_to_string=False) + self.assertEqual(self.duration, self.widget.render(self.duration)) + + def test_render_invalid_type(self): + self.assertEqual(self.widget.render(int(1)), "") + def test_clean(self): self.assertEqual(self.widget.clean("1:57:00"), self.duration) @@ -199,23 +368,79 @@ def test_clean_none(self): def test_clean_zero(self): self.assertEqual(self.widget.clean("0:00:00"), timedelta(0)) - @mock.patch("import_export.widgets.parse_duration", side_effect=ValueError("err")) - def test_clean_raises_ValueError(self, _): - with self.assertRaisesRegex(ValueError, "Enter a valid duration."): + @patch("import_export.widgets.parse_duration", side_effect=ValueError("err")) + @patch("import_export.widgets.logger") + def test_clean_raises_ValueError(self, mock_logger, _): + with self.assertRaisesRegex(ValueError, "Value could not be parsed."): self.widget.clean("x") + mock_logger.debug.assert_called_with("err") + + +class NumberWidgetTest(TestCase, RowDeprecationTestMixin): + def setUp(self): + self.value = 11.111 + self.widget = widgets.NumberWidget() + 
self.widget_coerce_to_string = widgets.NumberWidget(coerce_to_string=True) + + def test_is_empty_value_is_none(self): + self.assertTrue(self.widget.is_empty(None)) + + def test_is_empty_value_is_empty_string(self): + self.assertTrue(self.widget.is_empty("")) + + def test_is_empty_value_is_whitespace(self): + self.assertTrue(self.widget.is_empty(" ")) + + def test_is_empty_value_is_zero(self): + self.assertFalse(self.widget.is_empty(0)) + + def test_render(self): + self.assertEqual("11.111", self.widget.render(self.value)) + + def test_render_None_coerce_to_string_False(self): + self.assertEqual("", self.widget.render(None)) + + def test_render_invalid_type(self): + self.assertEqual(self.widget.render("a"), "") + @override_settings(LANGUAGE_CODE="fr-fr") + def test_locale_render_coerce_to_string_gte4(self): + self.assertEqual("11,111", self.widget_coerce_to_string.render(self.value)) -class FloatWidgetTest(TestCase): + def test_coerce_to_string_value_is_None(self): + self.assertEqual("", self.widget_coerce_to_string.render(None)) + +class FloatWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.value = 11.111 self.widget = widgets.FloatWidget() + self.widget_coerce_to_string = widgets.FloatWidget(coerce_to_string=True) def test_clean(self): self.assertEqual(self.widget.clean(11.111), self.value) + @override_settings(USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators(self): + self.assertEqual(self.widget.clean("1,234.5"), 1234.5) + + @override_settings(LANGUAGE_CODE="ar", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_arabic(self): + self.assertEqual(self.widget.clean("1.234,5"), 1234.5) + + @override_settings(LANGUAGE_CODE="zh-hans", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_chinese_simplified(self): + self.assertEqual(self.widget.clean("1234.5"), 1234.5) + + @override_settings(LANGUAGE_CODE="fr", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_french(self): + 
self.assertEqual(self.widget.clean("1\xa0234,5"), 1234.5) + def test_render(self): - self.assertEqual(self.widget.render(self.value), self.value) + self.assertEqual(self.widget.render(self.value), "11.111") + + def test_render_invalid_type(self): + self.assertEqual(self.widget.render("a"), "") def test_clean_string_zero(self): self.assertEqual(self.widget.clean("0"), 0.0) @@ -226,9 +451,12 @@ def test_clean_empty_string(self): self.assertEqual(self.widget.clean(" "), None) self.assertEqual(self.widget.clean("\r\n\t"), None) + @override_settings(LANGUAGE_CODE="fr-fr") + def test_locale_render_coerce_to_string_gte4(self): + self.assertEqual(self.widget_coerce_to_string.render(self.value), "11,111") -class DecimalWidgetTest(TestCase): +class DecimalWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.value = Decimal("11.111") self.widget = widgets.DecimalWidget() @@ -237,9 +465,32 @@ def test_clean(self): self.assertEqual(self.widget.clean("11.111"), self.value) self.assertEqual(self.widget.clean(11.111), self.value) - def test_render(self): + @override_settings(USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators(self): + self.assertEqual(self.widget.clean("1,234.5"), Decimal("1234.5")) + + @override_settings(LANGUAGE_CODE="ar", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_arabic(self): + self.assertEqual(self.widget.clean("1.234,5"), Decimal("1234.5")) + + @override_settings(LANGUAGE_CODE="zh-hans", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_chinese_simplified(self): + self.assertEqual(self.widget.clean("1234.5"), Decimal("1234.5")) + + @override_settings(LANGUAGE_CODE="fr", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_french(self): + self.assertEqual(self.widget.clean("1\xa0234,5"), Decimal("1234.5")) + + def test_render_coerce_to_string_is_False(self): + self.widget = widgets.DecimalWidget(coerce_to_string=False) self.assertEqual(self.widget.render(self.value), self.value) + 
def test_render(self): + self.assertEqual(self.widget.render(self.value), "11.111") + + def test_render_invalid_type(self): + self.assertEqual(self.widget.render("1"), "") + def test_clean_string_zero(self): self.assertEqual(self.widget.clean("0"), Decimal("0")) self.assertEqual(self.widget.clean("0.0"), Decimal("0")) @@ -249,13 +500,17 @@ def test_clean_empty_string(self): self.assertEqual(self.widget.clean(" "), None) self.assertEqual(self.widget.clean("\r\n\t"), None) + @override_settings(LANGUAGE_CODE="fr-fr") + def test_locale_render_coerce_to_string_gte4(self): + self.assertEqual(self.widget.render(self.value), "11,111") -class IntegerWidgetTest(TestCase): +class IntegerWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.value = 0 self.widget = widgets.IntegerWidget() self.bigintvalue = 163371428940853127 + self.widget_coerce_to_string = widgets.IntegerWidget(coerce_to_string=True) def test_clean_integer_zero(self): self.assertEqual(self.widget.clean(0), self.value) @@ -272,12 +527,41 @@ def test_clean_empty_string(self): self.assertEqual(self.widget.clean(" "), None) self.assertEqual(self.widget.clean("\n\t\r"), None) + @override_settings(USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators(self): + self.assertEqual(self.widget.clean("1,234.5"), 1234) + + @override_settings(LANGUAGE_CODE="ar", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_arabic(self): + self.assertEqual(self.widget.clean("1.234,5"), 1234) + + @override_settings(LANGUAGE_CODE="zh-hans", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_chinese_simplified(self): + self.assertEqual(self.widget.clean("1234.5"), 1234) + + @override_settings(LANGUAGE_CODE="fr", USE_THOUSAND_SEPARATOR=True) + def test_clean_numeric_separators_french(self): + self.assertEqual(self.widget.clean("1\xa0234,5"), 1234) + + def test_render_invalid_type(self): + self.assertEqual(self.widget.render("a"), "") -class ForeignKeyWidgetTest(TestCase): + 
@override_settings(LANGUAGE_CODE="fr-fr") + def test_locale_render_gte_django4(self): + self.assertEqual(self.widget_coerce_to_string.render(self.value), "0") + +class ForeignKeyWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.widget = widgets.ForeignKeyWidget(Author) - self.author = Author.objects.create(name='Foo') + self.natural_key_author_widget = widgets.ForeignKeyWidget( + Author, use_natural_foreign_keys=True + ) + self.natural_key_book_widget = widgets.ForeignKeyWidget( + Book, use_natural_foreign_keys=True + ) + self.author = Author.objects.create(name="Foo") + self.book = Book.objects.create(name="Bar", author=self.author) def test_clean(self): self.assertEqual(self.widget.clean(self.author.id), self.author) @@ -293,19 +577,41 @@ def test_render_empty(self): def test_clean_multi_column(self): class BirthdayWidget(widgets.ForeignKeyWidget): - def get_queryset(self, value, row): - return self.model.objects.filter( - birthday=row['birthday'] - ) - author2 = Author.objects.create(name='Foo') + def get_queryset(self, value, row, *args, **kwargs): + return self.model.objects.filter(birthday=row["birthday"]) + + author2 = Author.objects.create(name="Foo") author2.birthday = "2016-01-01" author2.save() - birthday_widget = BirthdayWidget(Author, 'name') - row = {'name': "Foo", 'birthday': author2.birthday} - self.assertEqual(birthday_widget.clean("Foo", row), author2) + birthday_widget = BirthdayWidget(Author, "name") + row_dict = {"name": "Foo", "birthday": author2.birthday} + self.assertEqual(birthday_widget.clean("Foo", row=row_dict), author2) + + def test_invalid_get_queryset(self): + class BirthdayWidget(widgets.ForeignKeyWidget): + def get_queryset(self, value, row): + return self.model.objects.filter(birthday=row["birthday"]) + + birthday_widget = BirthdayWidget(Author, "name") + row_dict = {"name": "Foo", "age": 38} + with self.assertRaises(TypeError): + birthday_widget.clean("Foo", row=row_dict, row_number=1) + + def 
test_lookup_multiple_columns(self): + # issue 1516 - override the values used to lookup an entry + class BirthdayWidget(widgets.ForeignKeyWidget): + def get_lookup_kwargs(self, value, row, *args, **kwargs): + return {"name": row["name"], "birthday": row["birthday"]} + + target_author = Author.objects.create(name="James Joyce", birthday="1882-02-02") + row_dict = {"name": "James Joyce", "birthday": "1882-02-02"} + birthday_widget = BirthdayWidget(Author, "name") + # prove that the overridden kwargs identify a row + res = birthday_widget.clean("non-existent name", row=row_dict) + self.assertEqual(target_author, res) def test_render_handles_value_error(self): - class TestObj(object): + class TestObj: @property def attr(self): raise ValueError("some error") @@ -314,32 +620,81 @@ def attr(self): self.widget = widgets.ForeignKeyWidget(mock.Mock(), "attr") self.assertIsNone(self.widget.render(t)) - - -class ManyToManyWidget(TestCase): - + def test_author_natural_key_clean(self): + """ + Ensure that we can import an author by its natural key. Note that + this will always need to be an iterable. + Generally this will be rendered as a list. + """ + self.assertEqual( + self.natural_key_author_widget.clean(json.dumps(self.author.natural_key())), + self.author, + ) + + def test_author_natural_key_render(self): + """ + Ensure we can render an author by its natural key. Natural keys will always be + tuples. + """ + self.assertEqual( + self.natural_key_author_widget.render(self.author), + json.dumps(self.author.natural_key()), + ) + + def test_book_natural_key_clean(self): + """ + Use the book case to validate a composite natural key of book name and author + can be cleaned. 
+ """ + self.assertEqual( + self.natural_key_book_widget.clean(json.dumps(self.book.natural_key())), + self.book, + ) + + def test_book_natural_key_render(self): + """ + Use the book case to validate a composite natural key of book name and author + can be rendered + """ + self.assertEqual( + self.natural_key_book_widget.render(self.book), + json.dumps(self.book.natural_key()), + ) + + def test_natural_foreign_key_with_key_is_id(self): + with self.assertRaises(WidgetError) as e: + widgets.ForeignKeyWidget( + Author, use_natural_foreign_keys=True, key_is_id=True + ) + self.assertEqual( + "use_natural_foreign_keys and key_is_id " "cannot both be True", + str(e.exception), + ) + + +class ManyToManyWidget(TestCase, RowDeprecationTestMixin): def setUp(self): self.widget = widgets.ManyToManyWidget(Category) self.widget_name = widgets.ManyToManyWidget(Category, field="name") - self.cat1 = Category.objects.create(name='Cat úňíčóďě') - self.cat2 = Category.objects.create(name='Cat 2') + self.cat1 = Category.objects.create(name="Cat úňíčóďě") + self.cat2 = Category.objects.create(name="Cat 2") def test_clean(self): - value = "%s,%s" % (self.cat1.pk, self.cat2.pk) + value = f"{self.cat1.pk},{self.cat2.pk}" cleaned_data = self.widget.clean(value) self.assertEqual(len(cleaned_data), 2) self.assertIn(self.cat1, cleaned_data) self.assertIn(self.cat2, cleaned_data) def test_clean_field(self): - value = "%s,%s" % (self.cat1.name, self.cat2.name) + value = f"{self.cat1.name},{self.cat2.name}" cleaned_data = self.widget_name.clean(value) self.assertEqual(len(cleaned_data), 2) self.assertIn(self.cat1, cleaned_data) self.assertIn(self.cat2, cleaned_data) def test_clean_field_spaces(self): - value = "%s, %s" % (self.cat1.name, self.cat2.name) + value = f"{self.cat1.name}, {self.cat2.name}" cleaned_data = self.widget_name.clean(value) self.assertEqual(len(cleaned_data), 2) self.assertIn(self.cat1, cleaned_data) @@ -369,14 +724,20 @@ def test_float(self): self.assertIn(self.cat1, 
cleaned_data) def test_render(self): - self.assertEqual(self.widget.render(Category.objects.order_by('id')), - "%s,%s" % (self.cat1.pk, self.cat2.pk)) - self.assertEqual(self.widget_name.render(Category.objects.order_by('id')), - "%s,%s" % (self.cat1.name, self.cat2.name)) + self.assertEqual( + self.widget.render(Category.objects.order_by("id")), + f"{self.cat1.pk},{self.cat2.pk}", + ) + self.assertEqual( + self.widget_name.render(Category.objects.order_by("id")), + f"{self.cat1.name},{self.cat2.name}", + ) + def test_render_value_none_as_blank(self): + self.assertEqual("", self.widget.render(None)) -class JSONWidgetTest(TestCase): +class JSONWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.value = {"value": 23} self.widget = widgets.JSONWidget() @@ -389,40 +750,54 @@ def test_render(self): def test_clean_single_quoted_string(self): self.assertEqual(self.widget.clean("{'value': 23}"), self.value) - self.assertEqual(self.widget.clean("{'value': null}"), {'value': None}) + self.assertEqual(self.widget.clean("{'value': null}"), {"value": None}) def test_clean_none(self): self.assertEqual(self.widget.clean(None), None) - self.assertEqual(self.widget.clean('{"value": null}'), {'value': None}) + self.assertEqual(self.widget.clean('{"value": null}'), {"value": None}) def test_render_none(self): self.assertEqual(self.widget.render(None), None) - self.assertEqual(self.widget.render(dict()), None) + self.assertEqual(self.widget.render({}), None) self.assertEqual(self.widget.render({"value": None}), '{"value": null}') -class SimpleArrayWidgetTest(TestCase): - +class SimpleArrayWidgetTest(TestCase, RowDeprecationTestMixin): def setUp(self): self.value = {"value": 23} self.widget = widgets.SimpleArrayWidget() def test_default_separator(self): - self.assertEqual(',', self.widget.separator) + self.assertEqual(",", self.widget.separator) def test_arg_separator(self): - self.widget = widgets.SimpleArrayWidget('|') - self.assertEqual('|', self.widget.separator) + 
self.widget = widgets.SimpleArrayWidget("|") + self.assertEqual("|", self.widget.separator) def test_clean_splits_str(self): s = "a,b,c" self.assertEqual(["a", "b", "c"], self.widget.clean(s)) def test_clean_returns_empty_list_for_empty_arg(self): - s = '' + s = "" self.assertEqual([], self.widget.clean(s)) def test_render(self): v = ["a", "b", "c"] s = "a,b,c" self.assertEqual(s, self.widget.render(v)) + + def test_render_no_coerce_to_string(self): + v = [1, 2, 3] + self.widget = widgets.SimpleArrayWidget(coerce_to_string=False) + self.assertEqual(v, self.widget.render(v)) + + def test_render_none_coerce_to_string_is_True(self): + self.widget = widgets.SimpleArrayWidget() + self.assertTrue(self.widget.coerce_to_string) + self.assertEqual("", self.widget.render(None)) + + def test_render_none_coerce_to_string_is_False(self): + self.widget = widgets.SimpleArrayWidget(coerce_to_string=False) + self.assertFalse(self.widget.coerce_to_string) + self.assertIsNone(self.widget.render(None)) diff --git a/tests/core/tests/utils.py b/tests/core/tests/utils.py new file mode 100644 index 000000000..51810b282 --- /dev/null +++ b/tests/core/tests/utils.py @@ -0,0 +1,19 @@ +import functools +import warnings + + +def ignore_utcnow_deprecation_warning(fn): + """ + Ignore the specific deprecation warning occurring due to openpyxl and python3.12. 
+ """ + + @functools.wraps(fn) + def inner(*args, **kwargs): + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + category=DeprecationWarning, + ) + fn(*args, **kwargs) + + return inner diff --git a/tests/core/tests/widget.py b/tests/core/tests/widget.py new file mode 100644 index 000000000..a7d817baa --- /dev/null +++ b/tests/core/tests/widget.py @@ -0,0 +1,6 @@ +from import_export import widgets + + +class HarshRussianWidget(widgets.CharWidget): + def clean(self, value, row=None, *args, **kwargs): + raise ValueError("Ова вриједност је страшна!") diff --git a/tests/core/views.py b/tests/core/views.py index 2e786203a..d7bea549a 100644 --- a/tests/core/views.py +++ b/tests/core/views.py @@ -1,9 +1,13 @@ +import warnings + from django.views.generic.list import ListView from import_export import mixins from . import models +with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) -class CategoryExportView(mixins.ExportViewFormMixin, ListView): - model = models.Category + class CategoryExportView(mixins.ExportViewFormMixin, ListView): + model = models.Category diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml new file mode 100644 index 000000000..1333f3d12 --- /dev/null +++ b/tests/docker-compose.yml @@ -0,0 +1,51 @@ +services: + postgresdb: + container_name: importexport_pgdb + environment: + DB_HOST: db + DB_PORT: 5432 + DB_NAME: import_export + IMPORT_EXPORT_POSTGRESQL_USER: ${IMPORT_EXPORT_POSTGRESQL_USER} + IMPORT_EXPORT_POSTGRESQL_PASSWORD: ${IMPORT_EXPORT_POSTGRESQL_PASSWORD} + POSTGRES_PASSWORD: ${IMPORT_EXPORT_POSTGRESQL_PASSWORD} + image: postgres:13 + restart: "no" + ports: + - "${IMPORT_EXPORT_POSTGRESQL_PORT:-5432}:5432" + volumes: + - ./docker/db/init-postgres-db.sh/:/docker-entrypoint-initdb.d/init-postgres-db.sh + - postgres-db-data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}'"] + interval: 10s + timeout: 
3s + retries: 3 + mysqldb: + container_name: importexport_mysqldb + image: mysql:8.0 + platform: linux/x86_64 + restart: "no" + environment: + MYSQL_DATABASE: import_export + MYSQL_USER: ${IMPORT_EXPORT_MYSQL_USER} + MYSQL_PASSWORD: ${IMPORT_EXPORT_MYSQL_PASSWORD} + MYSQL_ROOT_PASSWORD: ${IMPORT_EXPORT_MYSQL_PASSWORD} + MYSQL_PORT: + ports: + - "${IMPORT_EXPORT_MYSQL_PORT:-3306}:3306" + expose: + - '${IMPORT_EXPORT_MYSQL_PORT:-3306}' + volumes: + - ./docker/db/init-mysql-db.sh:/docker-entrypoint-initdb.d/init-mysql-db.sh + - mysql-db-data:/var/lib/mysql + healthcheck: + test: mysqladmin ping -h 127.0.0.1 -u $$MYSQL_USER --password=$$MYSQL_PASSWORD + interval: 10s + timeout: 3s + retries: 3 + +volumes: + postgres-db-data: + driver: local + mysql-db-data: + driver: local diff --git a/tests/docker/db/init-mysql-db.sh b/tests/docker/db/init-mysql-db.sh new file mode 100755 index 000000000..565125840 --- /dev/null +++ b/tests/docker/db/init-mysql-db.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +set -e + +mysql -u root -p"$MYSQL_ROOT_PASSWORD" <<-EOSQL + GRANT ALL PRIVILEGES ON test_import_export.* to '$MYSQL_USER'; +EOSQL \ No newline at end of file diff --git a/tests/bulk/docker/db/init-user-db.sh b/tests/docker/db/init-postgres-db.sh old mode 100644 new mode 100755 similarity index 94% rename from tests/bulk/docker/db/init-user-db.sh rename to tests/docker/db/init-postgres-db.sh index 10750e742..7e9222673 --- a/tests/bulk/docker/db/init-user-db.sh +++ b/tests/docker/db/init-postgres-db.sh @@ -1,6 +1,5 @@ #!/usr/bin/env bash set -e -echo "init-user-db.sh" psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL CREATE USER $IMPORT_EXPORT_POSTGRESQL_USER WITH PASSWORD '$IMPORT_EXPORT_POSTGRESQL_PASSWORD'; diff --git a/tests/scripts/bulk_import.py b/tests/scripts/bulk_import.py index 23eb79c51..d323a5fd4 100644 --- a/tests/scripts/bulk_import.py +++ b/tests/scripts/bulk_import.py @@ -1,8 +1,9 @@ """ Helper module for testing bulk imports. 
-See tests/bulk/README.md +See testing.rst """ + import time from functools import wraps @@ -12,7 +13,7 @@ from import_export import resources from import_export.instance_loaders import CachedInstanceLoader -from core.models import Book # isort:skip +from core.models import Book # isort:skip # The number of rows to be created on each profile run. # Increase this value for greater load testing. @@ -20,14 +21,13 @@ class _BookResource(resources.ModelResource): - class Meta: model = Book - fields = ('id', 'name', 'author_email', 'price') + fields = ("id", "name", "author_email", "price") use_bulk = True batch_size = 1000 skip_unchanged = True - #skip_diff = True + # skip_diff = True # This flag can speed up imports # Cannot be used when performing updates # force_init_instance = True @@ -39,9 +39,9 @@ def profile_duration(fn): def inner(*args, **kwargs): # Measure duration t = time.perf_counter() - retval = fn(*args, **kwargs) + fn(*args, **kwargs) elapsed = time.perf_counter() - t - print(f'Time {elapsed:0.4}') + print(f"Time {elapsed: 0.4}") return inner @@ -50,8 +50,10 @@ def profile_mem(fn): @wraps(fn) def inner(*args, **kwargs): # Measure memory - mem, retval = memory_usage((fn, args, kwargs), retval=True, timeout=200, interval=1e-7) - print(f'Memory {max(mem) - min(mem)}') + mem, retval = memory_usage( + (fn, args, kwargs), retval=True, timeout=200, interval=1e-7 + ) + print(f"Memory {max(mem) - min(mem)}") return retval return inner @@ -68,29 +70,42 @@ def do_import_mem(resource, dataset): def do_create(): + class _BookResource(resources.ModelResource): + class Meta: + model = Book + fields = ("id", "name", "author_email", "price") + use_bulk = True + batch_size = 1000 + skip_unchanged = True + skip_diff = True + force_init_instance = True + print("\ndo_create()") # clearing down existing objects - Book.objects.all().delete() + books = Book.objects.all() + books._raw_delete(books.db) - rows = [('', 'Some new book', 'email@example.com', '10.25')] * NUM_ROWS - 
dataset = tablib.Dataset(*rows, headers=['id', 'name', 'author_email', 'price']) + rows = [("", "Some new book", "email@example.com", "10.25")] * NUM_ROWS + dataset = tablib.Dataset(*rows, headers=["id", "name", "author_email", "price"]) book_resource = _BookResource() do_import_duration(book_resource, dataset) do_import_mem(book_resource, dataset) - # Book objects are created once for the 'duration' run, and once for the 'memory' run + # Book objects are created once for the 'duration' run, + # and once for the 'memory' run assert Book.objects.count() == NUM_ROWS * 2 - Book.objects.all().delete() + books._raw_delete(books.db) def do_update(): print("\ndo_update()") # clearing down existing objects - Book.objects.all().delete() + books = Book.objects.all() + books._raw_delete(books.db) - rows = [('', 'Some new book', 'email@example.com', '10.25')] * NUM_ROWS + rows = [("", "Some new book", "email@example.com", "10.25")] * NUM_ROWS books = [Book(name=r[1], author_email=r[2], price=r[3]) for r in rows] # run 'update' - there must be existing rows in the DB... 
@@ -101,25 +116,25 @@ def do_update(): # find the ids, so that we can perform the update all_books = Book.objects.all() rows = [(b.id, b.name, b.author_email, b.price) for b in all_books] - dataset = tablib.Dataset(*rows, headers=['id', 'name', 'author_email', 'price']) + dataset = tablib.Dataset(*rows, headers=["id", "name", "author_email", "price"]) book_resource = _BookResource() do_import_duration(book_resource, dataset) do_import_mem(book_resource, dataset) assert NUM_ROWS == Book.objects.count() - Book.objects.all().delete() + books = Book.objects.all() + books._raw_delete(books.db) def do_delete(): class _BookResource(resources.ModelResource): - def for_delete(self, row, instance): return True class Meta: model = Book - fields = ('id', 'name', 'author_email', 'price') + fields = ("id", "name", "author_email", "price") use_bulk = True batch_size = 1000 skip_diff = True @@ -128,9 +143,10 @@ class Meta: print("\ndo_delete()") # clearing down existing objects - Book.objects.all().delete() + books = Book.objects.all() + books._raw_delete(books.db) - rows = [('', 'Some new book', 'email@example.com', '10.25')] * NUM_ROWS + rows = [("", "Some new book", "email@example.com", "10.25")] * NUM_ROWS books = [Book(name=r[1], author_email=r[2], price=r[3]) for r in rows] # deletes - there must be existing rows in the DB... 
@@ -140,7 +156,7 @@ class Meta: all_books = Book.objects.all() rows = [(b.id, b.name, b.author_email, b.price) for b in all_books] - dataset = tablib.Dataset(*rows, headers=['id', 'name', 'author_email', 'price']) + dataset = tablib.Dataset(*rows, headers=["id", "name", "author_email", "price"]) book_resource = _BookResource() do_import_duration(book_resource, dataset) @@ -153,7 +169,7 @@ class Meta: all_books = Book.objects.all() rows = [(b.id, b.name, b.author_email, b.price) for b in all_books] - dataset = tablib.Dataset(*rows, headers=['id', 'name', 'author_email', 'price']) + dataset = tablib.Dataset(*rows, headers=["id", "name", "author_email", "price"]) do_import_mem(book_resource, dataset) assert 0 == Book.objects.count() @@ -170,4 +186,4 @@ def run(*args): else: do_create() do_update() - do_delete() \ No newline at end of file + do_delete() diff --git a/tests/settings.py b/tests/settings.py index c63139cdf..2a3f75b16 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -4,16 +4,14 @@ import django INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.sites', - - 'import_export', - - 'core', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.sites", + "import_export", + "core", ] SITE_ID = 1 @@ -22,90 +20,95 @@ DEBUG = True -STATIC_URL = '/static/' +STATIC_URL = "/static/" -SECRET_KEY = '2n6)=vnp8@bu0om9d05vwf7@=5vpn%)97-!d*t4zq1mku%0-@j' +SECRET_KEY = "2n6)=vnp8@bu0om9d05vwf7@=5vpn%)97-!d*t4zq1mku%0-@j" MIDDLEWARE = ( - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 
'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ) TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': ( - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - 'django.template.context_processors.request', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": ( + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + "django.template.context_processors.request", ), }, }, ] -if django.VERSION >= (3, 2): - DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" -if os.environ.get('IMPORT_EXPORT_TEST_TYPE') == 'mysql-innodb': +if os.environ.get("IMPORT_EXPORT_TEST_TYPE") == "mysql-innodb": IMPORT_EXPORT_USE_TRANSACTIONS = True DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.mysql', - 'NAME': 'import_export', - 'USER': os.environ.get('IMPORT_EXPORT_MYSQL_USER', 'root'), - 'PASSWORD': os.environ.get('IMPORT_EXPORT_MYSQL_PASSWORD', 'password'), - 'HOST': '127.0.0.1', - 'PORT': 3306, - 'TEST': { - 'CHARSET': 'utf8', - 'COLLATION': 'utf8_general_ci', - } + "default": { + "ENGINE": "django.db.backends.mysql", + "NAME": "import_export", + "USER": os.environ.get("IMPORT_EXPORT_MYSQL_USER"), + "PASSWORD": os.environ.get("IMPORT_EXPORT_MYSQL_PASSWORD"), + "HOST": "127.0.0.1", + "PORT": os.environ.get("IMPORT_EXPORT_MYSQL_PORT", "3306"), + "TEST": { + "CHARSET": "utf8", + "COLLATION": 
"utf8_general_ci", + "NAME": "test_import_export", + }, } } -elif os.environ.get('IMPORT_EXPORT_TEST_TYPE') == 'postgres': +elif os.environ.get("IMPORT_EXPORT_TEST_TYPE") == "postgres": IMPORT_EXPORT_USE_TRANSACTIONS = True DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'import_export', - 'USER': os.environ.get('IMPORT_EXPORT_POSTGRESQL_USER'), - 'PASSWORD': os.environ.get('IMPORT_EXPORT_POSTGRESQL_PASSWORD'), - 'HOST': 'localhost', - 'PORT': 5432 + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": "import_export", + "USER": os.environ.get("IMPORT_EXPORT_POSTGRESQL_USER"), + "PASSWORD": os.environ.get("IMPORT_EXPORT_POSTGRESQL_PASSWORD"), + "HOST": "localhost", + "PORT": os.environ.get("IMPORT_EXPORT_POSTGRESQL_PORT", "5432"), } } else: - if 'test' in sys.argv: - database_name = '' + if "test" in sys.argv: + database_name = "" else: - database_name = os.path.join(os.path.dirname(__file__), 'database.db') + database_name = os.path.join(os.path.dirname(__file__), "database.db") DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': database_name, + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": database_name, } } LOGGING = { - 'version': 1, - 'disable_existing_loggers': True, - 'handlers': { - 'console': { - 'class': 'logging.NullHandler' - } + "version": 1, + "disable_existing_loggers": True, + "handlers": {"console": {"class": "logging.NullHandler"}}, + "root": { + "handlers": ["console"], }, - 'root': { - 'handlers': ['console'], - }} - +} USE_TZ = False + +if django.VERSION >= (5, 0): + FORM_RENDERER = "django.forms.renderers.DjangoTemplates" +else: + FORM_RENDERER = "django.forms.renderers.DjangoDivFormRenderer" + +PASSWORD_HASHERS = [ + "django.contrib.auth.hashers.MD5PasswordHasher", +] diff --git a/tests/urls.py b/tests/urls.py index f953b005a..66bb090c1 100644 --- a/tests/urls.py +++ b/tests/urls.py @@ -7,9 +7,11 @@ admin.autodiscover() urlpatterns = [ - 
path('', RedirectView.as_view(url='/admin/'), name="admin-site"), - path('admin/', admin.site.urls), - path('export/category/', views.CategoryExportView.as_view(), name='export-category'), + path("", RedirectView.as_view(url="/admin/"), name="admin-site"), + path("admin/", admin.site.urls), + path( + "export/category/", views.CategoryExportView.as_view(), name="export-category" + ), ] urlpatterns += staticfiles_urlpatterns() diff --git a/tox.ini b/tox.ini index bb05fffbe..877e1f36c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,25 +1,43 @@ [tox] +min_version = 4.0 envlist = - isort - {py36,py37,py38,py39,py310}-django22-tablib{dev,stable} - {py36,py37,py38,py39,py310}-django31-tablib{dev,stable} - {py36,py37,py38,py39,py310}-django32-tablib{dev,stable} - {py38,py39,py310}-django40-tablib{dev,stable} - {py38,py39,py310}-djangomain-tablib{dev,stable} + py{39,310,311}-django{42} + py{310,311,312,313}-django{50,51,52} + py{312,313}-djangomain + py313-djangomain-tablibdev + +[gh-actions] +python = + 3.9: py39 + 3.10: py310 + 3.11: py311 + 3.12: py312 + 3.13: py313 [testenv] -commands = python -W error::DeprecationWarning -W error::PendingDeprecationWarning {toxinidir}/tests/manage.py test core +setenv = PYTHONPATH = {toxinidir}/tests +commands = + python \ + -W error::DeprecationWarning \ + -W error::PendingDeprecationWarning \ + -m coverage run \ + ./tests/manage.py test core \ + {posargs} deps = - tablibdev: -egit+https://github.com/jazzband/tablib.git#egg=tablib - tablibstable: tablib - django22: Django>=2.2,<3.0 - django31: Django>=3.1,<3.2 - django32: Django>=3.2,<4.0 - django40: Django>=4.0,<4.1 + tablibdev: -egit+https://github.com/jazzband/tablib.git@master\#egg=tablib + django42: Django>=4.2,<5.0 + django50: Django>=5.0,<5.1 + django51: Django>=5.1,<5.2 + django52: Django>=5.2a1,<5.3 djangomain: https://github.com/django/django/archive/main.tar.gz - -rrequirements/test.txt + .[tests] -[testenv:isort] -skip_install = True -deps = isort -commands = isort --check-only 
import_export tests +# if postgres / mysql environment variables exist, we can go ahead and run db specific tests +passenv = + IMPORT_EXPORT_POSTGRESQL_USER + IMPORT_EXPORT_POSTGRESQL_PASSWORD + IMPORT_EXPORT_POSTGRESQL_PORT + IMPORT_EXPORT_MYSQL_USER + IMPORT_EXPORT_MYSQL_PASSWORD + IMPORT_EXPORT_MYSQL_PORT + IMPORT_EXPORT_TEST_TYPE