diff --git a/.github/matchers/flake8.json b/.github/matchers/flake8.json
deleted file mode 100644
index ba849aca8f..0000000000
--- a/.github/matchers/flake8.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "problemMatcher": [
- {
- "owner": "flake8",
- "pattern": [
- {
- "code": 4,
- "column": 3,
- "file": 1,
- "line": 2,
- "message": 5,
- "regexp": "^([^:]*):(\\d+):(\\d+): (\\w+\\d\\d\\d) (.*)$"
- }
- ]
- }
- ]
-}
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 5dc09fbd4e..be72414559 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -20,7 +20,7 @@ jobs:
with:
submodules: recursive
- name: Setup Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: pip
@@ -31,7 +31,7 @@ jobs:
sudo apt-get install -y libgettextpo-dev
- name: Install dependencies
run: |
- pip install --upgrade pip wheel
+ python -m pip install --upgrade pip wheel
pip install -r requirements/dev.txt
- name: Build docs
run: make docs
diff --git a/.github/workflows/osx.yml b/.github/workflows/osx.yml
deleted file mode 100644
index c6dbb7fdd0..0000000000
--- a/.github/workflows/osx.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-name: Test on MacOS
-
-on:
- push:
- paths-ignore:
- - docs/**
- branches-ignore:
- - dependabot/**
- - deepsource**
- pull_request:
- paths-ignore:
- - docs/**
-
-permissions:
- contents: read
-
-jobs:
- test-osx:
-
- runs-on: macos-latest
- strategy:
- matrix:
- requirements: [latest]
- python-version:
- - '3.8'
- - '3.9'
- - '3.10'
- - '3.11'
-
- steps:
- - uses: actions/checkout@v4
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- cache: pip
- cache-dependency-path: requirements/*.txt
- - name: Install brew dependencies
- run: |
- brew install gettext
- echo "/usr/local/opt/gettext/bin" >> $GITHUB_PATH
- - name: Update pip
- run: pip install --upgrade pip wheel
- - name: Install pip dependencies
- if: matrix.requirements == 'latest'
- run: pip install -r requirements/test.txt
- - name: Install
- run: pip install .
- - name: pytest
- run: make test
- - name: test-functional
- run: make test-functional
- - name: Coverage
- run: |
- coverage xml
- - uses: codecov/codecov-action@v3
- with:
- name: ${{ runner.os }}, Python ${{ matrix.python-version }}, ${{ matrix.requirements }}
diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
index eea0bd73ae..db4c7c5154 100644
--- a/.github/workflows/pre-commit.yml
+++ b/.github/workflows/pre-commit.yml
@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: pip
@@ -28,45 +28,3 @@ jobs:
pip install -r requirements/lint.txt
- name: pre-commit
run: pre-commit run --all
-
- pydocstyle:
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v4
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: '3.11'
- cache: pip
- cache-dependency-path: requirements/*.txt
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip wheel
- pip install -r requirements/lint.txt
- - name: Run pydocstyle
- run: |
- echo "::add-matcher::.github/matchers/flake8.json"
- pre-commit run --all pydocstyle
- echo "::remove-matcher owner=flake8::"
-
- flake8:
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v4
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: '3.11'
- cache: pip
- cache-dependency-path: requirements/*.txt
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip wheel
- pip install -r requirements/lint.txt
- - name: Run flake8
- run: |
- echo "::add-matcher::.github/matchers/flake8.json"
- pre-commit run --all flake8
- echo "::remove-matcher owner=flake8::"
diff --git a/.github/workflows/setup.yml b/.github/workflows/setup.yml
index 67c1835020..fb765a7093 100644
--- a/.github/workflows/setup.yml
+++ b/.github/workflows/setup.yml
@@ -5,6 +5,8 @@ on:
branches-ignore:
- dependabot/**
- deepsource**
+ tags:
+ - '*'
pull_request:
permissions:
@@ -20,7 +22,7 @@ jobs:
with:
submodules: recursive
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: pip
@@ -30,7 +32,7 @@ jobs:
sudo apt-get update
sudo apt-get install -y libgettextpo-dev
- name: Update pip
- run: pip install --upgrade pip wheel
+ run: python -m pip install --upgrade pip wheel
- name: Install pip dependencies
run: |
pip install -r requirements/dev.txt
@@ -58,6 +60,47 @@ jobs:
run: coverage run --append ./setup.py sdist
- name: Coverage
run: coverage xml
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
with:
name: setup
+ - uses: actions/upload-artifact@v4
+ with:
+ path: ./dist/*
+
+ pypi-publish:
+ if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/')
+ needs:
+ - setup
+ runs-on: ubuntu-22.04
+ permissions:
+ # this permission is mandatory for trusted publishing
+ id-token: write
+ steps:
+ - uses: actions/download-artifact@v4
+ with:
+ # unpacks default artifact into dist/
+ # if `name: artifact` is omitted, the action will create extra parent dir
+ name: artifact
+ path: dist
+
+ - name: Publish package distributions to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+
+ github-publish:
+ if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/')
+ needs:
+ - setup
+ runs-on: ubuntu-22.04
+ permissions:
+ # this permission is mandatory for creating a release
+ contents: write
+ steps:
+ - uses: actions/download-artifact@v4
+ with:
+ # unpacks default artifact into dist/
+ # if `name: artifact` is omitted, the action will create extra parent dir
+ name: artifact
+ path: dist
+ - uses: ncipollo/release-action@v1
+ with:
+ artifacts: dist/*
diff --git a/.github/workflows/linux.yml b/.github/workflows/test.yml
similarity index 71%
rename from .github/workflows/linux.yml
rename to .github/workflows/test.yml
index f2f0f69186..ba17cf1dbb 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/test.yml
@@ -1,4 +1,4 @@
-name: Test on Linux
+name: Tests
on:
push:
@@ -17,8 +17,9 @@ permissions:
jobs:
test:
- runs-on: ubuntu-latest
+ runs-on: ${{ matrix.os }}
strategy:
+ fail-fast: false
matrix:
requirements: [latest]
python-version:
@@ -26,24 +27,36 @@ jobs:
- '3.9'
- '3.10'
- '3.11'
+ - '3.12'
+ os:
+ - ubuntu-latest
+ - windows-latest
+ - macos-latest
include:
- requirements: minimal
python-version: '3.8'
+ os: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: pip
cache-dependency-path: requirements/*.txt
- name: Install apt dependencies
+ if: matrix.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y libgettextpo-dev libxml2-dev libxmlsec1-dev gettext hunspell-af
+ - name: Install Windows dependencies
+ if: matrix.os == 'windows-latest'
+ run: |
+ nuget install Gettext.Tools -Version 0.22.4 -OutputDirectory c:\nuget;
+ Add-Content $env:GITHUB_PATH "C:\nuget\Gettext.Tools.0.22.4\tools\bin"
- name: Update pip
- run: pip install --upgrade pip wheel
+ run: python -m pip install --upgrade pip wheel
- name: Install pip dependencies
run: pip install -r requirements/test.txt
- name: Install pip dependencies
@@ -57,6 +70,7 @@ jobs:
- name: pytest
run: make test
- name: test-functional
+ if: matrix.os != 'windows-latest'
env:
PYTHON_ARGS: -m coverage run --append --source ${{ github.workspace }}/translate
COVERAGE_FILE: ${{ github.workspace }}/.coverage
@@ -64,6 +78,6 @@ jobs:
- name: Coverage
run: |
coverage xml
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
with:
name: ${{ runner.os }}, Python ${{ matrix.python-version }}, ${{ matrix.requirements }}
diff --git a/.github/workflows/win.yml b/.github/workflows/win.yml
deleted file mode 100644
index fe064af7a1..0000000000
--- a/.github/workflows/win.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-name: Test on Windows
-
-on:
- push:
- paths-ignore:
- - docs/**
- branches-ignore:
- - dependabot/**
- - deepsource**
- pull_request:
- paths-ignore:
- - docs/**
-
-permissions:
- contents: read
-
-jobs:
- test-win:
-
- runs-on: windows-latest
- strategy:
- matrix:
- requirements: [latest]
- python-version:
- - '3.8'
- - '3.9'
- - '3.10'
- - '3.11'
-
- steps:
- - uses: actions/checkout@v4
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- cache: pip
- cache-dependency-path: requirements/*.txt
- - name: Update pip
- run: python -m pip install --upgrade pip wheel
- - name: Install pip dependencies
- run: python -m pip install -r requirements/test.txt
- - name: Install
- run: pip install .
- - name: pytest
- run: make test
- - name: Coverage
- run: |
- coverage xml
- - uses: codecov/codecov-action@v3
- with:
- name: ${{ runner.os }}, Python ${{ matrix.python-version }}, ${{ matrix.requirements }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 95d83f9f17..da249084ce 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.5.0
+ rev: v4.6.0
hooks:
- id: check-yaml
- id: check-merge-conflict
@@ -15,33 +15,18 @@ repos:
hooks:
- id: check-hooks-apply
- id: check-useless-excludes
-- repo: https://github.com/pycqa/isort
- rev: 5.12.0
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.3.5
hooks:
- - id: isort
-- repo: https://github.com/asottile/pyupgrade
- rev: v3.15.0
+ - id: ruff
+ args: [--fix, --exit-non-zero-on-fix]
+ - id: ruff-format
+- repo: https://github.com/python-jsonschema/check-jsonschema
+ rev: 0.28.1
hooks:
- - id: pyupgrade
- args: [--py38-plus]
-- repo: https://github.com/psf/black
- rev: 23.10.0
- hooks:
- - id: black
-- repo: https://github.com/PyCQA/flake8
- rev: 6.1.0
- hooks:
- - id: flake8
- additional_dependencies:
- - flake8-breakpoint
- - flake8-mutable
- - flake8-polyfill
-- repo: https://github.com/PyCQA/pydocstyle
- rev: 6.3.0
- hooks:
- - id: pydocstyle
+ - id: check-github-workflows
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
- rev: v2.11.0
+ rev: v2.13.0
hooks:
- id: pretty-format-yaml
args: [--autofix, --indent, '2']
diff --git a/README.rst b/README.rst
index dd1c679084..c2b3611a47 100644
--- a/README.rst
+++ b/README.rst
@@ -139,12 +139,9 @@ simply appear in English. It is therefore recommended you install the
The package vobject is needed for ical2po and po2ical.
-The aeidon package (or gaupol if aeidon is not available) is needed for sub2po
+The aeidon package is needed for sub2po
and po2sub. Some Unicode encoded files (including most files from
) require version 0.14 or later.
-
-Gaupol might need the 'Universal Encoding Detector'
-
Trados TXT TM support requires the BeautifulSoup parser
diff --git a/docs/commands/csv2po.rst b/docs/commands/csv2po.rst
index ca95d017ff..c1e400b321 100644
--- a/docs/commands/csv2po.rst
+++ b/docs/commands/csv2po.rst
@@ -48,7 +48,7 @@ Options (csv2po):
-S, --timestamp skip conversion if the output file has newer timestamp
-P, --pot output PO Templates (.pot) rather than PO files (.po)
--charset=CHARSET set charset to decode from csv files
---columnorder=COLUMNORDER specify the order and position of columns (location,source,target)
+--columnorder=COLUMNORDER specify the order and position of columns (location,source,target,context)
--duplicates=DUPLICATESTYLE
what to do with duplicate strings (identical source
text): :doc:`merge, msgctxt `
@@ -68,7 +68,7 @@ Options (po2csv):
-x EXCLUDE, --exclude=EXCLUDE exclude names matching EXCLUDE from input paths
-o OUTPUT, --output=OUTPUT write to OUTPUT in csv format
-S, --timestamp skip conversion if the output file has newer timestamp
---columnorder=COLUMNORDER specify the order and position of columns (location,source,target)
+--columnorder=COLUMNORDER specify the order and position of columns (location,source,target,context)
.. _csv2po#csv_file_layout:
diff --git a/docs/commands/index.rst b/docs/commands/index.rst
index 1e1966bf4d..73543a86bc 100644
--- a/docs/commands/index.rst
+++ b/docs/commands/index.rst
@@ -182,8 +182,6 @@ Scripts
:maxdepth: 1
:hidden:
- moz-l10n-builder
- mozilla_l10n_scripts
phase
pocommentclean
pocompendium
@@ -197,9 +195,6 @@ The scripts are for working with and manipulating PO files. Unlike the
Some of them are packaged since version 1.0 of the Toolkit, but you might need
to download them from version control and do a manual installation .
-* :doc:`moz-l10n-builder` -- Create Mozilla XPIs and rebuild Windows installers
- from existing translations
-* :doc:`mozilla_l10n_scripts` -- Build Mozilla products Firefox and Thunderbird
* :doc:`phase` -- Helps manage a project divided into phases of work, including
sending, checking, etc
* :doc:`pocompendium` -- Creates various types of PO compendium (i.e. combines
diff --git a/docs/commands/moz-l10n-builder.rst b/docs/commands/moz-l10n-builder.rst
deleted file mode 100644
index 6afdf27aa7..0000000000
--- a/docs/commands/moz-l10n-builder.rst
+++ /dev/null
@@ -1,118 +0,0 @@
-
-.. _moz-l10n-builder:
-
-moz-l10n-builder
-****************
-
-Take a set of Mozilla (Firefox, Thunderbird, SeaMonkey, etc.) localisation and
-migrate them to the latest Mozilla source, building XPIs and repackaging hte
-Windows .exe file as needed.
-
-Please also check the page on `creating a language pack
-`_ on the
-Mozilla wiki, to stay abreast of the latest Mozilla way of doing things.
-
-.. note:: This page is only applicable to Mozilla products with its source
- hosted in CVS. This includes Firefox versions before 3.1 and Thunderbird
- versions before 3.0.
-
- For information about working with the new source trees in Mercurial, see the :doc:`mozilla_l10n_scripts` page.
-
-.. _moz-l10n-builder#prerequisites:
-
-Prerequisites
-=============
-
-* Translation update component and building XPIs
-
- * :doc:`Translate Toolkit `
- * Existing Mozilla translations in PO format
- * A checkout of `Mozilla sources
- `_
- updated to the correct `BRANCH or RELEASE
- `_
-
-* Building Windows executables
-
- * Firefox or Thunderbird `en-US .exe
- `_ file e.g.
- `Firefox 2.0 en-US
- `_
- * `upx `_ for executable compression
- * `Nullsoft installer `_ to package
- the installer.
- * `7zip `_ for various compression
- * Linux: `WINE `_ to run the Nullsoft installer
-
-* Directory structure under the directory you want to run moz-l10n-builder in:
-
-+-----------+--------------------------------------------------------------+
-| l10n/ | Contains Mozilla l10n files for available/needed language(s) |
-+-----------+--------------------------------------------------------------+
-| mozilla/ | The Mozilla source tree |
-+-----------+--------------------------------------------------------------+
-| po/ | Contains your PO files (output from moz2po) |
-+-----------+--------------------------------------------------------------+
-| potpacks/ | Where POT-archives go |
-+-----------+--------------------------------------------------------------+
-
-Note these instructions are for building on Linux, they may work on Windows.
-All software should be available through your distribution. You will need to
-use Wine to install the Nullsoft installer and may need to sort out some path
-issues to get it to run correctly.
-
-.. _moz-l10n-builder#latest_version:
-
-Latest Version
-==============
-
-moz-l10n-builer is not currently distributed as part of the toolkit. You can
-get the `latest version from Git
-`_
-and you will also need this `minor patch
-`_
-to the mozilla source code.
-
-.. _moz-l10n-builder#usage:
-
-Usage
-=====
-
-::
-
- moz-l10n-builder [language-code|ALL]
-
-Where:
-
-+----------------+-----------------------------------------------------------+
-| language-code | build only the supplied languages, or build ALL if |
-| | specified or if no option is supplied |
-+----------------+-----------------------------------------------------------+
-
-Your translations will not be modified.
-
-.. _moz-l10n-builder#operation:
-
-Operation
-=========
-
-moz-l10n-builder does the following:
-
-* Updates the mozilla/ directory
-* Creates POT files
-* Migrates your translations to this new POT file
-* Converts the migrated POT files to .dtd and .properties files
-* Builds XPI and .exe files
-* Performs various hacks to cater for the anomalies of file formats
-* Outputs a diff of you migrated PO files and your newly generated Mozilla
- l10n/ files
-
-.. _moz-l10n-builder#bugs:
-
-Bugs
-====
-
-Currently it is too Translate.org.za specific and not easily configurable
-without editing. It is also not intelligent enough to work our that you want
-Firefox vs Thunderbird generation. A lot of this functionality should be in
-the Mozilla source code itself. We hope over time that this might happen.
diff --git a/docs/commands/moz2po.rst b/docs/commands/moz2po.rst
index c081dcb302..5bdf56f63c 100644
--- a/docs/commands/moz2po.rst
+++ b/docs/commands/moz2po.rst
@@ -12,9 +12,7 @@ thus provides a complete roundtrip for Mozilla localisation using PO files and
PO editors.
.. note:: This page should only be used as a reference to the command-line
- options for moz2po and po2moz. For more about using the Translate Toolkit
- and PO files for translating Mozilla products, please see the page on
- :doc:`mozilla_l10n_scripts`.
+ options for moz2po and po2moz.
.. _moz2po#usage:
@@ -89,9 +87,6 @@ Examples
Creating POT files
------------------
-.. seealso:: :doc:`Creating Mozilla POT files
- `.
-
After extracting the en-US l10n files, you can run the following command::
moz2po -P l10n/en-US pot
diff --git a/docs/commands/mozilla_l10n_scripts.rst b/docs/commands/mozilla_l10n_scripts.rst
deleted file mode 100644
index d3ffc1f0a3..0000000000
--- a/docs/commands/mozilla_l10n_scripts.rst
+++ /dev/null
@@ -1,244 +0,0 @@
-
-.. _mozilla_l10n_scripts:
-
-Mozilla L10n Scripts
-********************
-
-.. _mozilla_l10n_scripts#introduction:
-
-Introduction
-============
-This page describes the purpose and usage of scripts available in the Translate
-Toolkit specifically for making the translation of Mozilla products easier.
-
-Mozilla's move from CVS to Mercurial made a lot of these scripts necessary. For
-more information about Mozilla l10n from CVS, see the :doc:`moz-l10n-builder`
-page.
-
-All of these scripts are available on Subversion from `here
-`_.
-
-We are currently generating POT files for most major betas, RCs and releases of
-Firefox and Thunderbird. They are available here:
-http://l10n.mozilla.org/pootle/pot/
-
-As a start you might want to just use these POT files and gradually learn more
-about the processes described below. Contact us for more help on using these.
-
-.. _mozilla_l10n_scripts#requirements:
-
-Requirements
-============
-
-* The :doc:`Translate Toolkit ` (>=1.3)
-* All scripts in the ``tools/mozilla`` directory (from the project sources)
- should be executable and in your ``PATH``.
-
-.. _build_ff3.1_langs.sh:
-
-build_ff3.1_langs.sh
-====================
-
-.. _build_ff3.1_langs.sh#description:
-
-Description
------------
-This is a simple bash script that embodies most of the Mozilla l10n process and
-does the following:
-
-#. Update Mozilla sources
-#. Update language files from `Mozilla's L10n
- `_ Mercurial repository.
-#. Replace old l10n en-US files with a fresh copy from the updated source tree.
-#. :doc:`Create new POT files ` from the
- :ref:`en-US ` l10n files.
-#. Create archives of the POT files.
-#. For each language:
-
- #. Update existing PO files if the checked out from a CVS, Subversion or
- Mercurial repository.
- #. :doc:`Migrate ` PO files to new POT
- files.
- #. :doc:`Create Mozilla l10n files ` for the language based on the
- migrated PO files.
- #. Create archives of the PO files.
- #. :ref:`Build langpack ` for the
- language.
-
-This script is used on the l10n.mozilla.org server to create most (if not all)
-of the files available from http://l10n.mozilla.org/pootle/. It was originally
-written as a stable way to provide these files and as such making it as general
-as possible was not the biggest requirement. This is evident in the script's
-very narrow focus.
-
-.. _build_ff3.1_langs.sh#usage:
-
-Usage
------
-This script takes no command-line parameters and is only configurable via the
-variables at the top and, failing that, custom hacking of the script.
-
-The variables are used in the following ways:
-
-+--------------------+-------------------------------------------------------+
-| ``BUILD_DIR`` | The base build directory from where building is done. |
-+--------------------+-------------------------------------------------------+
-| ``MOZCENTRAL_DIR`` | The directory containing a checkout of the Mozilla |
-| | source tree http://hg.mozilla.org/mozilla-central/ |
-+--------------------+-------------------------------------------------------+
-| ``HG_LANGS`` | A space-separated list of language codes to build |
-| | for. |
-+--------------------+-------------------------------------------------------+
-| ``L10N_DIR`` | The directory where Mozilla l10n files |
-| | (from l10n-central) should be collected. |
-+--------------------+-------------------------------------------------------+
-| ``PO_DIR`` | The directory containing the externally-hosted or |
-| | previously available source PO files (e.g. PO files |
-| | managed in another VCS repository). It contains a |
-| | sub-directory for each language. |
-+--------------------+-------------------------------------------------------+
-| ``POPACK_DIR`` | The output directory for PO archives. |
-+--------------------+-------------------------------------------------------+
-| ``PORECOVER_DIR`` | The directory to put recovered PO files in. It |
-| | contains a sub-directory for each language. |
-+--------------------+-------------------------------------------------------+
-| ``POT_INCLUDES`` | A space-separated list of files to be included in POT |
-| | archives. |
-+--------------------+-------------------------------------------------------+
-| ``POTPACK_DIR`` | The output directory for POT archives. |
-+--------------------+-------------------------------------------------------+
-| ``POUPDATED_DIR`` | The directory to use for updated PO files. It |
-| | contains a sub-directory for each language. |
-+--------------------+-------------------------------------------------------+
-| ``LANGPACK_DIR`` | The directory to put langpacks (XPIs) in. |
-+--------------------+-------------------------------------------------------+
-| ``FF_VERSION`` | The version of Firefox that is being built for. This |
-| | is used in the file names of archives. |
-+--------------------+-------------------------------------------------------+
-
-.. note:: It is **strongly** recommended that you mirror the directory
- structure specified by the default values of the ``*_DIR`` variables. For
- example the default value for ``L10N_DIR`` is ``${BUILD_DIR}/l10n``, then
- you should put your l10n-central check-outs in the ``l10n`` directory under
- your main build directory (``BUILD_DIR``).
-
- Basically, you should have an ideally separate build directory containing
- the following sub-directories: ``l10n``, ``mozilla-central``, ``po``,
- ``popacks``, ``potpacks``, ``po-updated`` and ``xpi`` (if used). This way
- the only variable that need to be changed is ``BUILD_DIR``.
-
-.. _build_tb3_langs.sh:
-
-build_tb3_langs.sh
-==================
-This is the script that the ``build_ff3.1_langs.sh`` script above was actually
-adapted from. It is 90% similar with the obvious exception that it is aimed at
-building Thunderbird 3.0 packages in stead of Firefox 3.1. Also note that this
-script uses the comm-central repository in stead of mozilla-central.
-
-.. _buildxpi.py:
-
-buildxpi.py
-===========
-
-.. _buildxpi.py#description:
-
-Description
------------
-Creates XPI language packs from Mozilla sources and translated l10n files. This
-script has only been tested with Firefox 3.1 beta sources.
-
-It is basically the scripted version of the process described on Mozilla's
-`"Creating a language pack"
-`_ page.
-
-This script is used by ``build_ff3.1_langs.sh`` to build language packs in its
-final step.
-
-.. note:: This script uses the ``.mozconfig`` file in your home directory. Any
- existing ``.mozconfig`` is renamed to ``.mozconfig.bak`` during operation
- and copied back afterwards.
-
-.. _buildxpi.py#usage:
-
-Usage
------
-::
-
- buildxpi.py [] [ ...]
-
-Example::
-
- buildxpi.py -L /path/to/l10n -s /path/to/mozilla-central -o /path/to/xpi_output af ar
-
-Options:
-
--h, --help show this help message and exit
--L L10NBASE, --l10n-base=L10NBASE
- The directory containing the subdirectory.
--o OUTPUTDIR, --output-dir=OUTPUTDIR
- The directory to copy the built XPI to (default:
- current directory).
--p MOZPRODUCT, --mozproduct=MOZPRODUCT
- The Mozilla product name (default: "browser").
--s SRCDIR, --src=SRCDIR
- The directory containing the Mozilla l10n sources.
--d, --delete-dest Delete output XPI if it already exists.
--v, --verbose Be more noisy
-
-.. _get_moz_enus.py:
-
-get_moz_enUS.py
-===============
-
-.. _get_moz_enus.py#description:
-
-Description
------------
-A simple script to collect the en-US l10n files from a Mozilla source tree
-(``'comm-central``' or ``'mozilla-central``') by traversing the product's
-``l10n.ini`` file.
-
-.. _get_moz_enus.py#usage:
-
-Usage
------
-
-::
-
- get_moz_enUS.py [options]
-
-Options:
-
--h, --help show this help message and exit
--s SRCDIR, --src=SRCDIR
- The directory containing the Mozilla l10n sources.
--d DESTDIR, --dest=DESTDIR
- The destination directory to copy the en-US locale
- files to.
--p MOZPRODUCT, --mozproduct=MOZPRODUCT
- The Mozilla product name.
---delete-dest Delete the destination directory (if it exists).
--v, --verbose Be more noisy
-
-.. _moz-l10n-builder#deprecated:
-
-moz-l10n-builder
-================
-This is the pre-Mercurial build script originally written by Dwayne Bailey.
-This is the script that all the others on this page replaces for post-CVS
-Mozilla l10n.
-
-.. note:: This script is **not** applicable to the l10n process of any Mozilla products after the move to Mercurial.
-
-For more information about this script see its :doc:`dedicated page
-`.
-
-.. _moz_l10n_builder.py:
-
-moz_l10n_builder.py
-===================
-This script was intended to be a simple and direct port of the
-``moz-l10n-builder`` script from above. It has pro's and cons in comparison to
-the original, but is very similar for the most part. So for more information
-about this script, see the original script's :doc:`page `.
diff --git a/docs/commands/sub2po.rst b/docs/commands/sub2po.rst
index 892189b874..f41fbacf02 100644
--- a/docs/commands/sub2po.rst
+++ b/docs/commands/sub2po.rst
@@ -102,5 +102,5 @@ Bugs
----
There might be some issues with encodings, since the srt files don't specify
them. We assume files to be encoded in UTF-8, so a conversion should solve this
-easily. Note that most of the handling of the srt files come from gaupol.
+easily. Note that most of the handling of the srt files come from aeidon.
diff --git a/docs/conf.py b/docs/conf.py
index c9ec80aa10..2c650f7f48 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -23,7 +23,7 @@
copyright = "2002-2023, Translate"
# The short X.Y version.
-version = "3.10.1"
+version = "3.12.2"
# The full version, including alpha/beta/rc tags
release = version
diff --git a/docs/developers/releasing.rst b/docs/developers/releasing.rst
index 4b9cdb66dc..9998701d69 100644
--- a/docs/developers/releasing.rst
+++ b/docs/developers/releasing.rst
@@ -10,36 +10,9 @@ lists and suggests some possible cleanup tasks to be done after releasing.
free to improve it.
-Create the package
+Prepare a release
==================
-The first steps are to create and validate a package for the next release.
-
-
-Get a clean checkout
---------------------
-
-We work from a clean checkout to ensure that everything you are adding to the
-build is what is in the repository and doesn't contain any of your uncommitted
-changes. It also ensures that someone else could replicate your process.
-
-.. code-block:: console
-
- $ git clone git@github.com:translate/translate.git translate-release
- $ cd translate-release
- $ git submodule update --init
-
-
-Check copyright dates
----------------------
-
-Update any copyright dates in :file:`docs/conf.py:copyright` and anywhere else
-that needs fixing.
-
-.. code-block:: console
-
- $ git grep 2013 # Should pick up anything that should be examined
-
Create release notes
--------------------
@@ -127,82 +100,12 @@ release of a ``$MINOR`` version will always have a ``$MICRO`` of ``.0``. So
tests, specifically the manpage ones, to use the right new version.
-Build the package
------------------
-
-Building is the first step to testing that things work. From your clean
-checkout run:
-
-.. code-block:: console
-
- $ mkvirtualenv build-ttk-release
- (build-ttk-release)$ pip install --upgrade setuptools pip
- (build-ttk-release)$ pip install -r requirements/dev.txt
- (build-ttk-release)$ make build
- (build-ttk-release)$ deactivate
-
-
-This will create a tarball in :file:`dist/` which you can use for further
-testing.
-
-.. note:: We use a clean checkout just to make sure that no inadvertent changes
- make it into the release.
-
-
-Test install and other tests
-----------------------------
-
-The easiest way to test is in a virtualenv. You can test the installation of
-the new release using:
-
-.. code-block:: console
-
- $ mkvirtualenv test-ttk-release
- (test-ttk-release)$ pip install --upgrade setuptools pip
- (test-ttk-release)$ pip install dist/translate-toolkit-$version.tar.gz
-
-
-You can then proceed with other tests such as checking:
-
-#. Documentation is available in the package
-#. Converters and scripts are installed and run correctly:
-
- .. code-block:: console
-
- (test-ttk-release)$ moz2po --help
- (test-ttk-release)$ php2po --version
- (test-ttk-release)$ deactivate
- $ rmvirtualenv test-ttk-release
-
-#. Meta information about the package is correct. This is stored in
- :file:`setup.py`, to see some options to display meta-data use:
-
- .. code-block:: console
-
- $ ./setup.py --help
-
- Now you can try some options like:
-
- .. code-block:: console
-
- $ ./setup.py --name
- $ ./setup.py --version
- $ ./setup.py --author
- $ ./setup.py --author-email
- $ ./setup.py --url
- $ ./setup.py --license
- $ ./setup.py --description
- $ ./setup.py --long-description
- $ ./setup.py --classifiers
-
- The actual descriptions are taken from :file:`translate/__init__.py`.
-
-
Publish the new release
=======================
-Once we have a valid package it is necessary to publish it and announce the
-release.
+Once the ``master`` branch is ready, the package can be published. This is
+automated by :file:`.github/workflows/setup.yml` which automatically publishes
+tagged releases to PyPI and GitHub.
Tag and branch the release
@@ -237,59 +140,6 @@ Use the admin pages to flag a version that should be published.
here.
-Publish on PyPI
----------------
-
-.. - `Submitting Packages to the Package Index
- `_
-
-
-.. note:: You need a username and password on `Python Package Index (PyPI)
- `_ and have rights to the project before you
- can proceed with this step.
-
- These can be stored in :file:`$HOME/.pypirc` and will contain your username
- and password. Check `Create a PyPI account
- `_
- for more details.
-
-
-Run the following to publish the package on PyPI:
-
-.. code-block:: console
-
- $ workon build-ttk-release
- (build-ttk-release)$ twine upload dist/translate*
- (build-ttk-release)$ deactivate
- $ rmvirtualenv build-ttk-release
-
-
-.. _releasing#create-github-release:
-
-Create a release on Github
---------------------------
-
-- https://github.com/translate/translate/releases/new
-
-You will need:
-
-- Tarball of the release
-- Release notes in Markdown
-
-
-Do the following to create the release:
-
-#. Draft a new release with the corresponding tag version
-#. Convert the major changes (no more than five) in the release notes to
- Markdown with `Pandoc `_. Bugfix releases can replace
- the major changes with *This is a bugfix release for the X.X.X branch.*
-#. Add the converted major changes to the release description
-#. Include at the bottom of the release description a link to the full release
- notes at Read the Docs
-#. Attach the tarball to the release
-#. Mark it as pre-release if it's a release candidate
-
-
Update Translate Toolkit website
--------------------------------
@@ -307,6 +157,13 @@ We use github pages for the website. First we need to checkout the pages:
#. :command:`git commit` and :command:`git push` -- changes are quite quick, so
easy to review.
+.. _releasing#create-github-release:
+
+Updating release notes on GitHub
+--------------------------------
+
+#. Open the GitHub release created by the GitHub Action.
+#. Edit it to include release notes (use the same text as used on the website).
Announce to the world
---------------------
diff --git a/docs/formats/android.rst b/docs/formats/android.rst
index 24a21ad575..8cbf4372ba 100644
--- a/docs/formats/android.rst
+++ b/docs/formats/android.rst
@@ -6,6 +6,8 @@ Android string resources
:wp:`Android ` programs make use of localisable
string resources.
+:guilabel:`Mobile Kotlin resources` are supported as a variant of this format.
+
.. note:: The toolkit supports this format, but still doesn't provide any
converter.
@@ -22,3 +24,4 @@ References
* `Localizing Android Applications `_ tutorial
* Reference for `translatable attribute
`_
+* `Mobile Kotlin resources `_
diff --git a/docs/formats/json.rst b/docs/formats/json.rst
index 22e53b4d72..4816728a68 100644
--- a/docs/formats/json.rst
+++ b/docs/formats/json.rst
@@ -39,3 +39,4 @@ Following JSON dialects are supported
* `go-i18n v1 & v2 `_
* `gotext `_
* `ARB `_
+* `FormatJS `_
diff --git a/docs/formats/subtitles.rst b/docs/formats/subtitles.rst
index 9f35cf9a17..7299e691bb 100644
--- a/docs/formats/subtitles.rst
+++ b/docs/formats/subtitles.rst
@@ -28,8 +28,8 @@ YouTube supports `a number of formats
Implementation details
======================
-Format support is provided by `Gaupol `_ a
-subtitling tool. Further enhancement of format support in Gaupol will directly
+Format support is provided by `aeidon `_ library.
+Further enhancement of format support in aeidon will directly
benefit our conversion ability.
.. _subtitles#usage:
diff --git a/docs/guides/creating_mozilla_pot_files.rst b/docs/guides/creating_mozilla_pot_files.rst
deleted file mode 100644
index 22e3d4c8f4..0000000000
--- a/docs/guides/creating_mozilla_pot_files.rst
+++ /dev/null
@@ -1,67 +0,0 @@
-
-.. _creating_mozilla_pot_files:
-
-Creating Mozilla POT files
-**************************
-
-You can do this using Mozilla source from CVS or Mercurial
-
-.. _creating_mozilla_pot_files#using_mercurial:
-
-Using Mercurial
-===============
-
-Since Firefox 3.1 and Thunderbird 3.0, Mozilla has switched to using Mercurial
-for version control. See the Mozilla's `L10n on Mercurial
-`_ page for
-instructions on how to checkout and update your Mozilla sources and l10n files.
-
-You can use :ref:`get_moz_enUS.py ` to
-extract an en-US directory from the source tree:
-
-::
-
- get_moz_enUS.py -s mozilla-central/ -d l10n/ -p browser
-
-This will move the correct en-US files to ``l10n/en-US``. You can now create
-POT files as follows::
-
- moz2po -P l10n/en-US l10n/pot
-
-This will create the POT files in ``l10n/pot`` using the American English files
-from ``en-US``. You now have a set of POT files that you can use for
-translation or updating your existing PO files.
-
-There are also :doc:`other scripts ` that can
-help with creating and updating POT and PO files for Mozilla localisation.
-
-.. _creating_mozilla_pot_files#using_cvs:
-
-Using CVS
-=========
-
-Firefox versions before 3.1 and Thunderbird versions before 3.0 still has its
-source in CVS. Check out files from the Mozilla repository. If you don't want
-to checkout all files do::
-
- make -f client.mk l10n-checkout
-
-The English files are in the ``mozilla/`` module, while the translated files
-all reside in the ``l10n/`` module. They have different structure but not
-enough to kill you.
-
-Once you have checked out ``mozilla/`` you will need to get the correct files
-for en-US. To do this we will create en-US as a pseudo language.
-
-::
-
- make -f tools/l10n/l10n.mk create-en-US
-
-This will move the correct en-US files to ``l10n/en-US``. You can now create
-POT files as follows::
-
- moz2po -P l10n/en-US l10n/pot
-
-This will create the POT files in ``l10n/pot`` using the American English files
-from ``en-US``. You now have a set of POT files that you can use for
-translation or updating your existing PO files.
diff --git a/docs/guides/index.rst b/docs/guides/index.rst
index 332d3c3a16..700b6131c0 100644
--- a/docs/guides/index.rst
+++ b/docs/guides/index.rst
@@ -16,7 +16,6 @@ Use Cases
creating_a_terminology_list_from_your_existing_translations
running_the_tools_on_microsoft_windows
cleanup_translator_comments
- creating_mozilla_pot_files
document_translation
* :doc:`Migrating an older version of your translations to the latest templates
@@ -33,5 +32,4 @@ Use Cases
`
* Using ``phase`` for the complete translation roundtrip
* :doc:`Cleanup translator comments `
-* :doc:`Creating Mozilla POT files `
* :doc:`Document translation `
diff --git a/docs/releases/3.11.0.rst b/docs/releases/3.11.0.rst
new file mode 100644
index 0000000000..1d717ce7de
--- /dev/null
+++ b/docs/releases/3.11.0.rst
@@ -0,0 +1,45 @@
+Translate Toolkit 3.11.0
+************************
+
+*Released on 8 November 2023*
+
+This release contains improvements and bug fixes.
+
+Changes
+=======
+
+Formats and Converters
+----------------------
+
+- Android
+
+ - Improved output escaping
+ - Removing unit now removes attached comments as well
+
+- JSON
+
+ - Fix round trip of null values
+
+- TS2
+
+ - Expand relative locations
+
+Tools
+-----
+
+- junitmsgfmt fixed for non-English locales
+
+Other changes
+-------------
+
+- Added support for Python 3.12
+- Consolidated CI pipeline
+- Use ruff for linting and code formatting
+- Move tests and test data to sit outside the modules
+
+Contributors
+============
+
+This release was made possible by the following people:
+
+Michal Čihař, Stuart Prescott
diff --git a/docs/releases/3.11.1.rst b/docs/releases/3.11.1.rst
new file mode 100644
index 0000000000..bac65ce2c3
--- /dev/null
+++ b/docs/releases/3.11.1.rst
@@ -0,0 +1,23 @@
+Translate Toolkit 3.11.1
+************************
+
+*Released on 15 November 2023*
+
+This release contains improvements and bug fixes.
+
+Changes
+=======
+
+Formats and Converters
+----------------------
+
+- Language data
+
+ - Update to CLDR 44
+
+Contributors
+============
+
+This release was made possible by the following people:
+
+Michal Čihař
diff --git a/docs/releases/3.12.0.rst b/docs/releases/3.12.0.rst
new file mode 100644
index 0000000000..cd2604a203
--- /dev/null
+++ b/docs/releases/3.12.0.rst
@@ -0,0 +1,49 @@
+Translate Toolkit 3.12.0
+************************
+
+*Released on 21 December 2023*
+
+This release contains improvements and bug fixes.
+
+Changes
+=======
+
+Formats and Converters
+----------------------
+
+- Android
+
+ - Fixed language definitions for some languages
+ - Added support for MOKO resources
+ - Improved handling of CDATA in strings
+
+- Gettext PO
+
+ - Honor line wrapping configuration when adding unit
+
+- Qt TS2
+
+ - Use correct plurals matching Qt implementation
+
+- Properties
+
+ - Notes behave consistently with other formats
+
+- RESX/RESW
+
+ - Use Windows newlines instead of UNIX ones
+
+- JSON
+
+ - Improved parsing of string keys
+
+- INI
+
+ - Do not treat lines starting with ``rem`` as comments
+
+Contributors
+============
+
+This release was made possible by the following people:
+
+Michal Čihař
diff --git a/docs/releases/3.12.1.rst b/docs/releases/3.12.1.rst
new file mode 100644
index 0000000000..96fc101f07
--- /dev/null
+++ b/docs/releases/3.12.1.rst
@@ -0,0 +1,23 @@
+Translate Toolkit 3.12.1
+************************
+
+*Released on 21 December 2023*
+
+This release contains improvements and bug fixes.
+
+Changes
+=======
+
+Formats and Converters
+----------------------
+
+- Android
+
+ - Fixed parsing of some translation files
+
+Contributors
+============
+
+This release was made possible by the following people:
+
+Michal Čihař
diff --git a/docs/releases/3.12.2.rst b/docs/releases/3.12.2.rst
new file mode 100644
index 0000000000..7802edbc3c
--- /dev/null
+++ b/docs/releases/3.12.2.rst
@@ -0,0 +1,43 @@
+Translate Toolkit 3.12.2
+************************
+
+*Released on 1 February 2024*
+
+This release contains improvements and bug fixes.
+
+Changes
+=======
+
+Formats and Converters
+----------------------
+
+- JSON
+
+ - Added support for FormatJS
+ - Improved error handling
+ - Improved indentation
+ - Preserve gotext key type on round-trip
+ - Improved plural parsing in gotext
+
+- XLIFF
+
+ - Improved whitespace handling
+
+- CSV
+
+ - Improved error handling
+
+- Android
+
+  - Remove unnecessary escaping
+
+- TS2
+
+ - Whitespace compatibility with lupdate
+
+Contributors
+============
+
+This release was made possible by the following people:
+
+Michal Čihař
diff --git a/docs/releases/index.rst b/docs/releases/index.rst
index 7097aa39a7..0067b9506c 100644
--- a/docs/releases/index.rst
+++ b/docs/releases/index.rst
@@ -17,6 +17,11 @@ Final releases
.. toctree::
:maxdepth: 1
+ 3.12.2 <3.12.2>
+ 3.12.1 <3.12.1>
+ 3.12.0 <3.12.0>
+ 3.11.1 <3.11.1>
+ 3.11.0 <3.11.0>
3.10.1 <3.10.1>
3.10.0 <3.10.0>
3.9.2 <3.9.2>
diff --git a/pyproject.toml b/pyproject.toml
index 6118b3b03c..8e65148151 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,3 +20,109 @@ known_third_party = [
"setuptools"
]
profile = "black"
+
+[tool.ruff]
+output-format = "github"
+target-version = "py38"
+
+[tool.ruff.lint]
+# CONFIG - intentional configuration
+# TODO - needs decision whether intention, add noqa tags or fix
+# WONTFIX - not fixable in current codebase, might be better to go for noqa
+ignore = [
+ "PT013", # TODO: Found incorrect import of pytest, use simple `import pytest` instead
+ "PT009", # TODO: Use a regular `assert` instead of unittest-style `assertCountEqual`
+ "PLE0604", # TODO: Invalid object in `__all__`, must contain only strings
+ "PLW1510", # TODO: `subprocess.run` without explicit `check` argument
+ "SLOT000", # TODO: Subclasses of `str` should define `__slots__`
+ "B028", # TODO: No explicit `stacklevel` keyword argument found
+ "TRY301", # TODO: Abstract `raise` to an inner function
+ "B006", # TODO: Do not use mutable data structures for argument defaults
+ "PLR0913", # TODO: Too many arguments in function definition
+ "PERF203", # TODO: `try`-`except` within a loop incurs performance overhead
+ "ERA001", # TODO: Found commented-out code
+ "B023", # TODO: Function definition does not bind loop variable
+ "SIM115", # TODO: Use context handler for opening files
+ "E741", # TODO: Ambiguous variable name:
+ "B007", # TODO: Loop control variable not used within loop body
+ "B020", # TODO: Loop control variable `node` overrides iterable it iterates
+ "PLW2901", # TODO: `for` loop variable `input_unit` overwritten by assignment target
+ "TD004", # TODO: Missing colon in TODO
+ "TD005", # TODO: Missing issue description after `TODO`
+ "TD007", # TODO: Missing space after colon in TODO
+ "SLF001", # TODO: Private member accessed
+ "RUF012", # TODO: Mutable class attributes should be annotated with `typing.ClassVar`
+ "TRY003", # WONTFIX: Avoid specifying long messages outside the exception class
+ "SIM102", # TODO: Use a single `if` statement instead of nested `if` statements
+ "ANN", # TODO: type annotations missing
+ "COM", # CONFIG: No trailing commas
+ "DTZ005", # TODO: The use of `datetime.datetime.now()` without `tz` argument is not allowed
+ "D1", # TODO: Missing docstring
+ "PT011", # TODO: `pytest.raises(ValueError)` is too broad, set the `match` parameter or use a more specific exception
+ "D205", # TODO: 1 blank line required between summary line and description
+ "D401", # TODO: First line of docstring should be in imperative mood
+ "D402", # TODO: First line should not be the function's signature
+ "D404", # TODO: First word of the docstring should not be "This"
+ "D203", # CONFIG: incompatible with D211
+ "D212", # CONFIG: incompatible with D213
+ "B904", # TODO: Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling
+ "BLE001", # WONTFIX: Do not catch blind exception: `Exception`, third-party modules do not have defined exceptions
+ "ARG001", # TODO: Unused function argument (mostly for API compatibility)
+ "ARG002", # TODO: Unused method argument (mostly for API compatibility)
+ "ARG004", # TODO: Unused static method argument
+ "ARG005", # TODO: Unused lambda argument:
+ "TD002", # CONFIG: no detailed TODO documentation is required
+ "TD003", # CONFIG: no detailed TODO documentation is required
+ "S603", # CONFIG: `subprocess` call: check for execution of untrusted input
+ "S607", # CONFIG: executing system installed tools
+ "PD011", # WONTFIX: Use `.to_numpy()` instead of `.values
+ "FIX001", # TODO: Line contains FIXME, consider resolving the issue
+ "FIX002", # CONFIG: we use TODO
+ "S101", # CONFIG: Use of `assert` detected
+ "FIX003", # TODO: Line contains XXX, consider resolving the issue
+ "TD001", # TODO: Invalid TODO tag: `FIXME`
+ "FIX004", # TODO: Line contains HACK, consider resolving the issue
+ "N801", # TODO: Class name should use CapWords convention
+ "N802", # TODO: Function name `FAIL` should be lowercase
+ "N803", # TODO: Argument name `attributeValue` should be lowercase
+ "N806", # TODO: Variable `sourcelanguageNode` in function should be lowercase
+ "N813", # TODO: Camelcase `StateEnum` imported as lowercase `state`
+ "N815", # TODO: Variable `bodyNode` in class scope should not be mixedCase
+ "N816", # TODO: Variable `theDOMImplementation` in global scope should not be mixedCase
+ "N818", # TODO: Exception name `FilterFailure` should be named with an Error suffix
+ "PLR2004", # TODO: Magic value used in comparison, consider replacing with a constant variable
+ "PLR0911", # TODO: Too many return statements
+ "PLR0912", # TODO: Too many branches
+ "PLR0915", # TODO: Too many statements
+ "C901", # TODO: too complex
+ "FBT", # TODO: Boolean in function definition
+ "EM", # TODO: Exception strings
+ "E501", # WONTFIX: we accept long strings (rest is formatted by black)
+ "RUF001", # WONTFIX: String contains ambiguous unicode character, we are using Unicode
+ "RUF002", # WONTFIX: Docstring contains ambiguous
+ "RUF003", # WONTFIX: Comment contains ambiguous
+ "T201", # TODO: `print` found
+ "A001", # TODO: Variable is shadowing a Python builtin
+ "A002", # TODO: overriding builtins (might need noqa tags)
+ "A003", # TODO: Class attribute `map` is shadowing a Python builtin
+ "S320", # TODO: Using `lxml` to parse untrusted data is known to be vulnerable to XML attacks
+ "S324", # TODO: Probable use of insecure hash functions in `hashlib`: `md5`
+ "S311", # TODO: Standard pseudo-random generators are not suitable for cryptographic purposes
+ "S301", # TODO: `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue
+ "S105", # TODO: Possible hardcoded password assigned
+ "S307", # TODO: Use of possibly insecure function; consider using `ast.literal_eval`
+ "S310", # TODO: Audit URL open for permitted schemes.
+ "PLE2502", # TODO: Contains control characters that can permit obfuscated code
+ "INP001", # TODO: File is part of an implicit namespace package. Add an `__init__.py`.
+ "TID252", # TODO: Relative imports from parent modules are banned
+ "D409", # TODO: Section underline should match the length of its name
+ "D206", # CONFIG: formatter
+ 'ISC001', # CONFIG: formatter
+ 'Q000', # CONFIG: formatter
+ 'Q001', # CONFIG: formatter
+ 'Q002', # CONFIG: formatter
+ 'Q003', # CONFIG: formatter
+ 'W191', # CONFIG: formatter
+ "PTH" # TODO: Not using pathlib for now
+]
+select = ["ALL"]
diff --git a/requirements/dist.txt b/requirements/dist.txt
index a9c257655a..64e6eafe35 100644
--- a/requirements/dist.txt
+++ b/requirements/dist.txt
@@ -1,7 +1,7 @@
-build==1.0.3
-pip==23.3.1
-setuptools==68.2.2
+build==1.2.1
+pip==24.0
+setuptools==69.2.0
setuptools-scm==8.0.4
Sphinx==7.2.6
-twine==4.0.2
-virtualenv==20.24.6
+twine==5.0.0
+virtualenv==20.25.1
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 959c060882..68372830e3 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -1 +1 @@
-pre-commit==3.5.0
+pre-commit==3.7.0
diff --git a/requirements/optional.txt b/requirements/optional.txt
index 72e2587573..077a854be5 100644
--- a/requirements/optional.txt
+++ b/requirements/optional.txt
@@ -1,11 +1,11 @@
-r required.txt
# Format support
-aeidon==1.13 # Subtitles
+aeidon==1.14.1 # Subtitles
# Format support
BeautifulSoup4>=4.3 # Trados
# Encoding detection
-charset-normalizer==3.3.1 # chardet
+charset-normalizer==3.3.2 # chardet
# Tmserver backend
cheroot==10.0.0 # tmserver
# Format support
@@ -13,15 +13,15 @@ fluent.syntax==0.19.0 # Fluent
# Format support
iniparse==0.5 # INI
# Format support
-mistletoe==1.2.1 # Markdown
+mistletoe==1.3.0 # Markdown
# Format support
phply==1.2.6 # PHP
pyenchant==3.2.2 # spellcheck
# Windows Resources (rc2po and po2rc)
-pyparsing==3.1.1 # RC
+pyparsing==3.1.2 # RC
# Faster matching in e.g. pot2po
python-Levenshtein>=0.12 # Levenshtein
# Format support
-ruamel.yaml==0.18.2 # YAML
+ruamel.yaml==0.18.6 # YAML
# Format support
-vobject==0.9.6.1 # iCal
+vobject==0.9.7 # iCal
diff --git a/requirements/test.txt b/requirements/test.txt
index 5f376466d1..5fe623f338 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -1,5 +1,5 @@
-r optional.txt
-pytest==7.4.3
-pytest-cov==4.1.0
-pytest-xdist==3.3.1
-syrupy==4.6.0
+pytest==8.0.2
+pytest-cov==5.0.0
+pytest-xdist==3.5.0
+syrupy==4.6.1
diff --git a/setup.cfg b/setup.cfg
index 3eb8461a0c..10584bcfcb 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -26,6 +26,7 @@ classifiers =
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
+ Programming Language :: Python :: 3.12
Topic :: Software Development :: Internationalization
Topic :: Software Development :: Localization
Topic :: Software Development :: Libraries :: Python Modules
@@ -42,9 +43,6 @@ python_requires = >=3.8
include_package_data = 1
zip_safe = 0
scripts =
- tools/mozilla/build_firefox.sh
- tools/mozilla/buildxpi.py
- tools/mozilla/get_moz_enUS.py
tools/pocommentclean
tools/pocompendium
tools/pomigrate2
diff --git a/setup.py b/setup.py
index 32f760b890..1422b10747 100755
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,8 @@
def parse_requirements(file_name):
- """Parses a pip requirements file and returns a list of packages.
+ """
+ Parses a pip requirements file and returns a list of packages.
Use the result of this function in the ``install_requires`` field.
Copied from cburgmer/pdfserver.
@@ -49,7 +50,7 @@ def parse_extra_requires(filename):
for line in requirements:
line = line.strip()
# Skip comments, inclusion or blank lines
- if not line or line.startswith("-r") or line.startswith("#"):
+ if not line or line.startswith(("-r", "#")):
continue
dependency, section = line.split("#")
dependency = dependency.strip()
diff --git a/tests/cli/data/test_po2flatxml_preserve/out.xml b/tests/cli/data/test_po2flatxml_preserve/out.xml
index 0019c4a278..981246dc54 100644
--- a/tests/cli/data/test_po2flatxml_preserve/out.xml
+++ b/tests/cli/data/test_po2flatxml_preserve/out.xml
@@ -4,6 +4,6 @@
Zwei-modDrei
- 42
+ 42Vier-new
diff --git a/tests/odf_xliff/test_odf_xliff.py b/tests/odf_xliff/test_odf_xliff.py
index b5d4ab78b2..44f9f6dca1 100644
--- a/tests/odf_xliff/test_odf_xliff.py
+++ b/tests/odf_xliff/test_odf_xliff.py
@@ -19,21 +19,13 @@
import difflib
import os
-import os.path as path
-import sys
import zipfile
+from os import path
from lxml import etree
-# get directory of this test
-dir = os.path.dirname(os.path.abspath(__file__))
-# get top-level directory (moral equivalent of ../..)
-dir = os.path.dirname(os.path.dirname(dir))
-# load python modules from top-level
-sys.path.insert(0, dir)
-
-from translate.convert import odf2xliff, xliff2odf # isort:skip
-from translate.storage import factory, xliff # isort:skip
+from translate.convert import odf2xliff, xliff2odf
+from translate.storage import factory, xliff
def args(src, tgt, **kwargs):
@@ -133,10 +125,9 @@ def __eq__(self, other):
if l != r:
print("difference for file named", filename)
return False
- else:
- if self._get_data(filename) != other._get_data(filename):
- print("difference for file named", filename)
- return False
+ elif self._get_data(filename) != other._get_data(filename):
+ print("difference for file named", filename)
+ return False
return True
def __bytes__(self):
diff --git a/tests/translate/convert/test_accesskey.py b/tests/translate/convert/test_accesskey.py
index a30ef65b7d..047e72804f 100644
--- a/tests/translate/convert/test_accesskey.py
+++ b/tests/translate/convert/test_accesskey.py
@@ -18,7 +18,7 @@
"""
Test the various functions for combining and extracting accesskeys and
-labels
+labels.
"""
from translate.convert import accesskey
@@ -27,7 +27,7 @@
def test_get_label_and_accesskey():
"""
test that we can extract the label and accesskey components from an
- accesskey+label string
+ accesskey+label string.
"""
assert accesskey.extract("") == ("", "")
assert accesskey.extract("File") == ("File", "")
@@ -37,7 +37,7 @@ def test_get_label_and_accesskey():
def test_extract_bad_accesskeys():
- """Test what we do in situations that are bad fof accesskeys"""
+ """Test what we do in situations that are bad fof accesskeys."""
# Space is not valid accesskey so we don't extract anything
assert accesskey.extract("More& Whitespace") == ("More& Whitespace", "")
@@ -45,7 +45,7 @@ def test_extract_bad_accesskeys():
def test_ignore_entities():
"""
test that we don't get confused with entities and a & access key
- marker
+ marker.
"""
assert accesskey.extract("Set &browserName; as &Default") != (
"Set &browserName; as &Default",
@@ -58,43 +58,44 @@ def test_ignore_entities():
def test_alternate_accesskey_marker():
- """check that we can identify the accesskey if the marker is different"""
+ """Check that we can identify the accesskey if the marker is different."""
assert accesskey.extract("~File", "~") == ("File", "F")
assert accesskey.extract("&File", "~") == ("&File", "")
def test_unicode():
- """test that we can do the same with unicode strings"""
+ """Test that we can do the same with unicode strings."""
assert accesskey.extract("Eḓiṱ") == ("Eḓiṱ", "")
assert accesskey.extract("E&ḓiṱ") == ("Eḓiṱ", "ḓ")
assert accesskey.extract("E_ḓiṱ", "_") == ("Eḓiṱ", "ḓ")
label, akey = accesskey.extract("E&ḓiṱ")
assert label, akey == ("Eḓiṱ", "ḓ")
- assert isinstance(label, str) and isinstance(akey, str)
+ assert isinstance(label, str)
+ assert isinstance(akey, str)
assert accesskey.combine("Eḓiṱ", "ḓ") == ("E&ḓiṱ")
def test_numeric():
- """test combining and extracting numeric markers"""
+ """Test combining and extracting numeric markers."""
assert accesskey.extract("&100%") == ("100%", "1")
assert accesskey.combine("100%", "1") == "&100%"
def test_empty_string():
- """test that we can handle and empty label+accesskey string"""
+ """Test that we can handle and empty label+accesskey string."""
assert accesskey.extract("") == ("", "")
assert accesskey.extract("", "~") == ("", "")
def test_end_of_string():
- """test that we can handle an accesskey at the end of the string"""
+ """Test that we can handle an accesskey at the end of the string."""
assert accesskey.extract("Hlola&") == ("Hlola&", "")
def test_combine_label_accesskey():
"""
test that we can combine accesskey and label to create a label+accesskey
- string
+ string.
"""
assert accesskey.combine("File", "F") == "&File"
assert accesskey.combine("File", "F", "~") == "~File"
@@ -115,13 +116,13 @@ def test_combine_label_accesskey_different_capitals():
def test_uncombinable():
- """test our behaviour when we cannot combine label and accesskey"""
+ """Test our behaviour when we cannot combine label and accesskey."""
assert accesskey.combine("File", "D") is None
assert accesskey.combine("File", "") is None
assert accesskey.combine("", "") is None
def test_accesskey_already_in_text():
- """test that we can combine if the accesskey is already in the text"""
+ """Test that we can combine if the accesskey is already in the text."""
assert accesskey.combine("Mail & Newsgroups", "N") == "Mail & &Newsgroups"
assert accesskey.extract("Mail & &Newsgroups") == ("Mail & Newsgroups", "N")
diff --git a/tests/translate/convert/test_android2po.py b/tests/translate/convert/test_android2po.py
index e20e04ad7e..4667c9467d 100644
--- a/tests/translate/convert/test_android2po.py
+++ b/tests/translate/convert/test_android2po.py
@@ -8,15 +8,13 @@
class TestAndroid2PO:
@staticmethod
def android2po(source, template=None):
- """helper that converts android source to po source without requiring files"""
+ """Helper that converts android source to po source without requiring files."""
inputfile = BytesIO(source.encode())
templatefile = BytesIO(template.encode()) if template else None
- outputpo = android2po._convertandroid(inputfile, templatefile)
- return outputpo
+ return android2po._convertandroid(inputfile, templatefile)
def test_no_template_units(self):
- """test that we can handle android with no template"""
-
+ """Test that we can handle android with no template."""
_input = """
Multimedia tab
@@ -27,8 +25,7 @@ def test_no_template_units(self):
assert poresult.units[1].source == "Multimedia tab"
def test_template_units(self):
- """test that we can handle android with template"""
-
+ """Test that we can handle android with template."""
template = """
Multimedia tab
@@ -45,7 +42,7 @@ def test_template_units(self):
class TestAndroid2POCommand(test_convert.TestConvertCommand, TestAndroid2PO):
- """Tests running actual android2po commands on files"""
+ """Tests running actual android2po commands on files."""
convertmodule = android2po
diff --git a/tests/translate/convert/test_convert.py b/tests/translate/convert/test_convert.py
index 5744912753..58fdacb1e6 100644
--- a/tests/translate/convert/test_convert.py
+++ b/tests/translate/convert/test_convert.py
@@ -10,26 +10,26 @@
class TestConvertCommand:
- """Tests running actual commands on files"""
+ """Tests running actual commands on files."""
convertmodule = convert
defaultoptions = {"progress": "none"}
expected_options = []
def setup_method(self, method):
- """creates a clean test directory for the given method"""
+ """Creates a clean test directory for the given method."""
self.testdir = f"{self.__class__.__name__}_{method.__name__}"
self.cleardir()
os.mkdir(self.testdir)
self.rundir = os.path.abspath(os.getcwd())
def teardown_method(self, method):
- """removes the test directory for the given method"""
+ """Removes the test directory for the given method."""
os.chdir(self.rundir)
self.cleardir()
def cleardir(self):
- """removes the test directory"""
+ """Removes the test directory."""
if os.path.exists(self.testdir):
for dirpath, subdirs, filenames in os.walk(self.testdir, topdown=False):
for name in filenames:
@@ -41,7 +41,7 @@ def cleardir(self):
assert not os.path.exists(self.testdir)
def run_command(self, *argv, **kwargs):
- """runs the command via the main function, passing self.defaultoptions and keyword arguments as --long options and argv arguments straight"""
+ """Runs the command via the main function, passing self.defaultoptions and keyword arguments as --long options and argv arguments straight."""
os.chdir(self.testdir)
argv = list(argv)
kwoptions = getattr(self, "defaultoptions", {}).copy()
@@ -57,11 +57,11 @@ def run_command(self, *argv, **kwargs):
os.chdir(self.rundir)
def get_testfilename(self, filename):
- """gets the path to the test file"""
+ """Gets the path to the test file."""
return os.path.join(self.testdir, filename)
def open_testfile(self, filename, mode="rb"):
- """opens the given filename in the testdirectory in the given mode"""
+ """Opens the given filename in the testdirectory in the given mode."""
filename = self.get_testfilename(filename)
if not mode.startswith("r"):
subdir = os.path.dirname(filename)
@@ -74,7 +74,7 @@ def open_testfile(self, filename, mode="rb"):
return open(filename, mode)
def create_testfile(self, filename, contents):
- """creates the given file in the testdirectory with the given contents"""
+ """Creates the given file in the testdirectory with the given contents."""
if isinstance(contents, str):
contents = contents.encode("utf-8")
testfile = self.open_testfile(filename, "wb")
@@ -82,14 +82,13 @@ def create_testfile(self, filename, contents):
testfile.close()
def read_testfile(self, filename):
- """reads the given file in the testdirectory and returns the contents"""
+ """Reads the given file in the testdirectory and returns the contents."""
with open(self.get_testfilename(filename), "rb") as testfile:
- content = testfile.read()
- return content
+ return testfile.read()
@staticmethod
def help_check(options, option, last=False):
- """check that a help string occurs and remove it"""
+ """Check that a help string occurs and remove it."""
assert option in options
newoptions = []
for line in options.splitlines():
@@ -101,7 +100,7 @@ def help_check(options, option, last=False):
return "\n".join(newoptions)
def test_help(self, capsys):
- """tests getting help (returning the help_string so further tests can be done)"""
+ """Tests getting help (returning the help_string so further tests can be done)."""
with pytest.raises(SystemExit):
self.run_command(help=True)
help_string, err = capsys.readouterr()
diff --git a/tests/translate/convert/test_csv2po.py b/tests/translate/convert/test_csv2po.py
index bd8d852b80..0969983069 100644
--- a/tests/translate/convert/test_csv2po.py
+++ b/tests/translate/convert/test_csv2po.py
@@ -8,7 +8,7 @@
def test_replacestrings():
- """Test the _replacestring function"""
+ """Test the _replacestring function."""
assert (
csv2po.replacestrings("Test one two three", ("one", "een"), ("two", "twee"))
== "Test een twee three"
@@ -18,7 +18,7 @@ def test_replacestrings():
class TestCSV2PO:
@staticmethod
def csv2po(csvsource, template=None):
- """helper that converts csv source to po source without requiring files"""
+ """Helper that converts csv source to po source without requiring files."""
inputfile = BytesIO(csvsource.encode())
inputcsv = csvl10n.csvfile(inputfile)
if template:
@@ -31,13 +31,13 @@ def csv2po(csvsource, template=None):
@staticmethod
def singleelement(storage):
- """checks that the pofile contains a single non-header element, and returns it"""
+ """Checks that the pofile contains a single non-header element, and returns it."""
print(bytes(storage))
assert headerless_len(storage.units) == 1
return first_translatable(storage)
def test_simpleentity(self):
- """checks that a simple csv entry definition converts properly to a po entry"""
+ """Checks that a simple csv entry definition converts properly to a po entry."""
csvheader = "location,source,target\n"
csvsource = "intl.charset.default,ISO-8859-1,UTF-16"
# Headerless
@@ -51,7 +51,7 @@ def test_simpleentity(self):
assert pounit.target == "UTF-16"
def test_simpleentity_with_template(self):
- """checks that a simple csv entry definition converts properly to a po entry"""
+ """Checks that a simple csv entry definition converts properly to a po entry."""
csvsource = """location,original,translation
intl.charset.default,ISO-8859-1,UTF-16"""
potsource = """#: intl.charset.default
@@ -65,7 +65,7 @@ def test_simpleentity_with_template(self):
assert pounit.target == "UTF-16"
def test_newlines(self):
- """tests multiline po entries"""
+ """Tests multiline po entries."""
minicsv = r""""Random comment
with continuation","Original text","Langdradige teks
wat lank aanhou"
@@ -78,19 +78,19 @@ def test_newlines(self):
assert unit.target == "Langdradige teks\nwat lank aanhou"
def test_tabs(self):
- """Test the escaping of tabs"""
+ """Test the escaping of tabs."""
minicsv = ',"First column\tSecond column","Twee kolomme gesky met \t"'
pofile = self.csv2po(minicsv)
unit = self.singleelement(pofile)
print(unit.source)
assert unit.source == "First column\tSecond column"
assert (
- not pofile.findunit("First column\tSecond column").target
- == "Twee kolomme gesky met \\t"
+ pofile.findunit("First column\tSecond column").target
+ != "Twee kolomme gesky met \\t"
)
def test_quotes(self):
- """Test the escaping of quotes (and slash)"""
+ """Test the escaping of quotes (and slash)."""
minicsv = r''',"Hello ""Everyone""","Good day ""All"""
,"Use \"".","Gebruik \""."'''
print(minicsv)
@@ -109,7 +109,7 @@ def test_quotes(self):
# assert pofile.findunit('Use \\".').target == 'Gebruik \\".'
def test_empties(self):
- """Tests that things keep working with empty entries"""
+ """Tests that things keep working with empty entries."""
minicsv = ",SomeSource,"
pofile = self.csv2po(minicsv)
assert pofile.findunit("SomeSource") is not None
@@ -117,7 +117,7 @@ def test_empties(self):
assert headerless_len(pofile.units) == 1
def test_kdecomment(self):
- """checks that we can merge into KDE comment entries"""
+ """Checks that we can merge into KDE comment entries."""
csvsource = """location,source,target
simple.c,Source,Target"""
potsource = r"""#: simple.c
@@ -132,7 +132,7 @@ def test_kdecomment(self):
assert pounit.target == "Target"
def test_escaped_newlines(self):
- """Tests that things keep working with escaped newlines"""
+ """Tests that things keep working with escaped newlines."""
minicsv = '"source","target"\r\n"yellow pencil","żółty\\nołówek"'
pofile = self.csv2po(minicsv)
assert pofile.findunit("yellow pencil") is not None
@@ -141,7 +141,7 @@ def test_escaped_newlines(self):
class TestCSV2POCommand(test_convert.TestConvertCommand, TestCSV2PO):
- """Tests running actual csv2po commands on files"""
+ """Tests running actual csv2po commands on files."""
convertmodule = csv2po
diff --git a/tests/translate/convert/test_dtd2po.py b/tests/translate/convert/test_dtd2po.py
index 3461295ec8..5e0ea67a97 100644
--- a/tests/translate/convert/test_dtd2po.py
+++ b/tests/translate/convert/test_dtd2po.py
@@ -11,7 +11,7 @@
class TestDTD2PO:
@staticmethod
def dtd2po(dtdsource, dtdtemplate=None):
- """helper that converts dtd source to po source without requiring files"""
+ """Helper that converts dtd source to po source without requiring files."""
inputfile = BytesIO(dtdsource.encode())
inputdtd = dtd.dtdfile(inputfile)
convertor = dtd2po.dtd2po()
@@ -25,7 +25,7 @@ def dtd2po(dtdsource, dtdtemplate=None):
@staticmethod
def convertdtd(dtdsource):
- """call the convertdtd, return the outputfile"""
+ """Call the convertdtd, return the outputfile."""
inputfile = BytesIO(dtdsource.encode())
outputfile = BytesIO()
templatefile = None
@@ -34,7 +34,7 @@ def convertdtd(dtdsource):
@staticmethod
def singleelement(pofile):
- """checks that the pofile contains a single non-header element, and returns it"""
+ """Checks that the pofile contains a single non-header element, and returns it."""
assert len(pofile.units) == 2
assert pofile.units[0].isheader()
print(pofile.units[1])
@@ -42,14 +42,13 @@ def singleelement(pofile):
@staticmethod
def countelements(pofile):
- """returns the number of non-header items"""
+ """Returns the number of non-header items."""
if pofile.units[0].isheader():
return len(pofile.units) - 1
- else:
- return len(pofile.units)
+ return len(pofile.units)
def test_simpleentity(self):
- """checks that a simple dtd entity definition converts properly to a po entry"""
+ """Checks that a simple dtd entity definition converts properly to a po entry."""
dtdsource = '\n'
pofile = self.dtd2po(dtdsource)
pounit = self.singleelement(pofile)
@@ -64,7 +63,7 @@ def test_simpleentity(self):
assert pounit.target == "piesangs te koop"
def test_convertdtd(self):
- """checks that the convertdtd function is working"""
+ """Checks that the convertdtd function is working."""
dtdsource = '\n'
posource = self.convertdtd(dtdsource)
pofile = po.pofile(BytesIO(posource))
@@ -73,14 +72,14 @@ def test_convertdtd(self):
assert unit.target == ""
def test_apos(self):
- """apostrophe should not break a single-quoted entity definition, bug 69"""
+ """Apostrophe should not break a single-quoted entity definition, bug 69."""
dtdsource = "\n"
pofile = self.dtd2po(dtdsource)
pounit = self.singleelement(pofile)
assert pounit.source == "bananas ' for sale"
def test_quotes(self):
- """quotes should be handled in a single-quoted entity definition"""
+ """Quotes should be handled in a single-quoted entity definition."""
dtdsource = """\n"""
pofile = self.dtd2po(dtdsource)
pounit = self.singleelement(pofile)
@@ -88,7 +87,7 @@ def test_quotes(self):
assert pounit.source == '"Bananas" for sale'
def test_emptyentity(self):
- """checks that empty entity definitions survive into po file, bug 15"""
+ """Checks that empty entity definitions survive into po file, bug 15."""
dtdsource = '\n'
pofile = self.dtd2po(dtdsource)
pounit = self.singleelement(pofile)
@@ -96,14 +95,14 @@ def test_emptyentity(self):
assert 'msgctxt "credit.translation"' in str(pounit)
def test_two_empty_entities(self):
- """checks that two empty entitu definitions have correct context (bug 2190)."""
+ """Checks that two empty entitu definitions have correct context (bug 2190)."""
dtdsource = '\n\n'
pofile = self.dtd2po(dtdsource)
assert pofile.units[-2].getcontext() == "community.exp.start"
assert pofile.units[-1].getcontext() == "contribute.end"
def test_emptyentity_translated(self):
- """checks that if we translate an empty entity it makes it into the PO, bug 101"""
+ """Checks that if we translate an empty entity it makes it into the PO, bug 101."""
dtdtemplate = '\n'
dtdsource = '\n'
pofile = self.dtd2po(dtdsource, dtdtemplate)
@@ -115,7 +114,7 @@ def test_emptyentity_translated(self):
assert unit.target == "Translators Names"
def test_localisaton_note_simple(self):
- """test the simple localisation more becomes a #. comment"""
+ """Test the simple localisation more becomes a #. comment."""
dtdsource = """\n"
- + '\n'
- )
+ """Test that LOCALIZATION NOTES are added properly as #. comments and disambiguated with msgctxt entries."""
+ dtdtemplate = """\n\n"""
dtdsource = dtdtemplate % ("note1.label", "note1.label") + dtdtemplate % (
"note2.label",
"note2.label",
@@ -147,7 +143,7 @@ def test_localisation_note_merge(self):
assert posource.count("msgctxt") == 2
def test_donttranslate_simple(self):
- """check that we handle DONT_TRANSLATE messages properly"""
+ """Check that we handle DONT_TRANSLATE messages properly."""
dtdsource = """
"""
pofile = self.dtd2po(dtdsource)
@@ -162,10 +158,10 @@ def test_donttranslate_simple(self):
assert self.countelements(pofile) == 1
def test_donttranslate_label(self):
- """test strangeness when label entity is marked DONT_TRANSLATE and accesskey is not, bug 30"""
+ """Test strangeness when label entity is marked DONT_TRANSLATE and accesskey is not, bug 30."""
dtdsource = (
"\n"
- + '\n\n'
+ '\n\n'
)
pofile = self.dtd2po(dtdsource)
posource = bytes(pofile).decode("utf-8")
@@ -175,21 +171,21 @@ def test_donttranslate_label(self):
assert "editorCheck.accesskey" in posource
def test_donttranslate_onlyentity(self):
- """if the entity is itself just another entity then it shouldn't appear in the output PO file"""
+ """If the entity is itself just another entity then it shouldn't appear in the output PO file."""
dtdsource = """
"""
pofile = self.dtd2po(dtdsource)
assert self.countelements(pofile) == 0
def test_donttranslate_commentedout(self):
- """check that we don't process messages in : bug 102"""
+ """Check that we don't process messages in : bug 102."""
dtdsource = """"""
pofile = self.dtd2po(dtdsource)
assert self.countelements(pofile) == 0
def test_spaces_at_start_of_dtd_lines(self):
- """test that pretty print spaces at the start of subsequent DTD element lines are removed from the PO file, bug 79"""
+ """Test that pretty print spaces at the start of subsequent DTD element lines are removed from the PO file, bug 79."""
# Space at the end of the line
dtdsource = (
'\n\n'
)
@@ -232,15 +228,15 @@ def test_accesskeys_folding(self):
assert pounit.target == "&Gcina ka..."
def test_accesskeys_mismatch(self):
- """check that we can handle accesskeys that don't match and thus can't be folded into the .label entry"""
+ """Check that we can handle accesskeys that don't match and thus can't be folded into the .label entry."""
dtdsource = (
- '\n' '\n'
+ '\n\n'
)
pofile = self.dtd2po(dtdsource)
assert self.countelements(pofile) == 2
def test_carriage_return_in_multiline_dtd(self):
- r"""test that we create nice PO files when we find a \r\n in a multiline DTD element"""
+ r"""Test that we create nice PO files when we find a \r\n in a multiline DTD element."""
dtdsource = (
'\n'
@@ -250,7 +246,7 @@ def test_carriage_return_in_multiline_dtd(self):
assert unit.source == "First line then next lines."
def test_multiline_with_blankline(self):
- """test that we can process a multiline entity that has a blank line in it, bug 331"""
+ """Test that we can process a multiline entity that has a blank line in it, bug 331."""
dtdsource = """
opsiespreferences' >
@@ -274,7 +270,7 @@ def test_multiline_closing_quotes(self):
)
def test_preserving_spaces(self):
- """test that we preserve space that appear at the start of the first line of a DTD entity"""
+ """Test that we preserve space that appear at the start of the first line of a DTD entity."""
# Space before first character
dtdsource = ''
pofile = self.dtd2po(dtdsource)
@@ -289,7 +285,7 @@ def test_preserving_spaces(self):
@staticmethod
def test_escaping_newline_tabs():
- """test that we handle all kinds of newline permutations"""
+ """Test that we handle all kinds of newline permutations."""
dtdsource = '\n'
converter = dtd2po.dtd2po()
thedtd = dtd.dtdunit()
@@ -305,7 +301,7 @@ def test_escaping_newline_tabs():
)
def test_abandoned_accelerator(self):
- """test that when a language DTD has an accelerator but the template DTD does not that we abandon the accelerator"""
+ """Test that when a language DTD has an accelerator but the template DTD does not that we abandon the accelerator."""
dtdtemplate = '\n'
dtdlanguage = '\n\n'
pofile = self.dtd2po(dtdlanguage, dtdtemplate)
@@ -314,7 +310,7 @@ def test_abandoned_accelerator(self):
assert unit.target == "Toets"
def test_unassociable_accelerator(self):
- """test to see that we can handle accelerator keys that cannot be associated correctly"""
+ """Test to see that we can handle accelerator keys that cannot be associated correctly."""
dtdsource = '\n'
pofile = self.dtd2po(dtdsource)
assert pofile.units[1].source == "Manage Certificates..."
@@ -324,7 +320,7 @@ def test_unassociable_accelerator(self):
assert pofile.units[2].target == "M"
def test_changed_labels_and_accelerators(self):
- """test to ensure that when the template changes an entity name we can still manage the accelerators"""
+ """Test to ensure that when the template changes an entity name we can still manage the accelerators."""
dtdtemplate = """
@@ -342,7 +338,7 @@ def test_changed_labels_and_accelerators(self):
@mark.xfail(reason="Not Implemented")
def test_accelerator_keys_not_in_sentence(self):
- """tests to ensure that we can manage accelerator keys that are not part of the transated sentence eg in Chinese"""
+ """Tests to ensure that we can manage accelerator keys that are not part of the transated sentence eg in Chinese."""
dtdtemplate = """
"""
dtdlanguage = """
@@ -360,13 +356,13 @@ def test_accelerator_keys_not_in_sentence(self):
assert pofile.units[1].target == "使用自動捲動 (&A)".decode("utf-8")
def test_exclude_entity_includes(self):
- """test that we don't turn an include into a translatable string"""
+ """Test that we don't turn an include into a translatable string."""
dtdsource = ''
pofile = self.dtd2po(dtdsource)
assert self.countelements(pofile) == 0
def test_linewraps(self):
- """check that redundant line wraps are removed from the po file"""
+ """Check that redundant line wraps are removed from the po file."""
dtdsource = """Test me.
"
def test_merging_with_new_untranslated(self):
- """test that when we merge in new untranslated strings with existing translations we manage the encodings properly"""
+ """Test that when we merge in new untranslated strings with existing translations we manage the encodings properly."""
# This should probably be in test_po.py but was easier to do here
dtdtemplate = """\n\n"""
dtdlanguage = """\n"""
@@ -384,7 +380,7 @@ def test_merging_with_new_untranslated(self):
assert pofile.units[1].source == "Unread"
def test_merge_without_template(self):
- """test that we we manage the case where we merge and their is no template file"""
+ """Test that we we manage the case where we merge and their is no template file."""
# If we supply a template file we should fail if the template file does not exist or is blank. We should
# not put the translation in as the source.
# TODO: this test fails, since line 16 checks for "not dtdtemplate"
@@ -397,7 +393,7 @@ def test_merge_without_template(self):
class TestDTD2POCommand(test_convert.TestConvertCommand, TestDTD2PO):
- """Tests running actual dtd2po commands on files"""
+ """Tests running actual dtd2po commands on files."""
convertmodule = dtd2po
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_flatxml2po.py b/tests/translate/convert/test_flatxml2po.py
index 723fc8e9c0..c68efec0c7 100644
--- a/tests/translate/convert/test_flatxml2po.py
+++ b/tests/translate/convert/test_flatxml2po.py
@@ -1,4 +1,4 @@
-"""Tests converting flat XML files to Gettext PO localization files"""
+"""Tests converting flat XML files to Gettext PO localization files."""
from io import BytesIO
@@ -10,7 +10,7 @@
class TestFlatXML2PO:
@staticmethod
def _convert(xmlstring, templatestring=None, **kwargs):
- """Helper that converts xml source to po target without requiring files"""
+ """Helper that converts xml source to po target without requiring files."""
inputfile = BytesIO(xmlstring.encode())
templatefile = None
if templatestring:
@@ -30,10 +30,11 @@ def _convert_to_string(self, *args, **kwargs):
@staticmethod
def _do_assert_store(actual):
- """Asserts whether the passed actual store contains two assumed units:
+ """
+ Asserts whether the passed actual store contains two assumed units:
'one' => 'One'
'two' => 'Two'
- (plus a header present by default)
+ (plus a header present by default).
"""
assert actual.units[0].isheader()
assert len(actual.units) == 3
@@ -117,7 +118,8 @@ def test_all_parameters(self):
self._do_assert_store(actual)
def test_empty_file_is_empty_store(self):
- """Test a conversion that starts with an empty file.
+ """
+ Test a conversion that starts with an empty file.
This must not trigger the element name validation
or cause other issues. An empty store is expected.
@@ -130,7 +132,7 @@ def test_empty_file_is_empty_store(self):
class TestFlatXML2POCommand(test_convert.TestConvertCommand):
- """Tests running actual flatxml2po commands on files"""
+ """Tests running actual flatxml2po commands on files."""
convertmodule = flatxml2po
diff --git a/tests/translate/convert/test_html2po.py b/tests/translate/convert/test_html2po.py
index 417e4a212a..20df14bed9 100644
--- a/tests/translate/convert/test_html2po.py
+++ b/tests/translate/convert/test_html2po.py
@@ -30,7 +30,7 @@ def po2html(posource, htmltemplate):
@staticmethod
def countunits(pofile, expected):
- """helper to check that we got the expected number of messages"""
+ """Helper to check that we got the expected number of messages."""
actual = len(pofile.units)
if actual > 0:
if pofile.units[0].isheader():
@@ -40,7 +40,7 @@ def countunits(pofile, expected):
@staticmethod
def compareunit(pofile, unitnumber, expected):
- """helper to validate a PO message"""
+ """Helper to validate a PO message."""
if not pofile.units[0].isheader():
unitnumber = unitnumber - 1
print("unit source: " + pofile.units[unitnumber].source + "|")
@@ -48,18 +48,18 @@ def compareunit(pofile, unitnumber, expected):
assert str(pofile.units[unitnumber].source) == str(expected)
def check_single(self, markup, itemtext):
- """checks that converting this markup produces a single element with value itemtext"""
+ """Checks that converting this markup produces a single element with value itemtext."""
pofile = self.html2po(markup)
self.countunits(pofile, 1)
self.compareunit(pofile, 1, itemtext)
def check_null(self, markup):
- """checks that converting this markup produces no elements"""
+ """Checks that converting this markup produces no elements."""
pofile = self.html2po(markup)
self.countunits(pofile, 0)
def check_phpsnippet(self, php):
- """Given a snippet of php, put it into an HTML shell and see if the results are as expected"""
+ """Given a snippet of php, put it into an HTML shell and see if the results are as expected."""
self.check_single(
'
',
'A paragraph with hyperlink.',
@@ -223,7 +223,7 @@ def test_tag_a_with_linebreak(self):
)
def test_sequence_of_anchor_elements(self):
- """test that we can extract a sequence of anchor elements without mixing up start/end tags, issue #3768"""
+ """Test that we can extract a sequence of anchor elements without mixing up start/end tags, issue #3768."""
self.check_single(
'
',
'This is a link but this is not. However this is too',
@@ -280,11 +280,11 @@ def test_table_complex(self):
self.compareunit(pofile, 9, "Two")
def test_table_empty(self):
- """Test that we ignore tables that are empty.
+ """
+ Test that we ignore tables that are empty.
A table is deemed empty if it has no translatable content.
"""
-
self.check_null(
"""
"""
)
@@ -296,11 +296,11 @@ def test_table_empty(self):
)
def test_address(self):
- """Test to see if the address element is extracted"""
+ """Test to see if the address element is extracted."""
self.check_single("My address", "My address")
def test_headings(self):
- """Test to see if the h* elements are extracted"""
+ """Test to see if the h* elements are extracted."""
markup = "
Heading One
Heading Two
Heading Three
Heading Four
Heading Five
Heading Six
"
pofile = self.html2po(markup)
self.countunits(pofile, 6)
@@ -312,7 +312,7 @@ def test_headings(self):
self.compareunit(pofile, 6, "Heading Six")
def test_headings_with_linebreaks(self):
- """Test to see if h* elements with newlines can be extracted"""
+ """Test to see if h* elements with newlines can be extracted."""
markup = "
Heading\nOne
Heading\nTwo
Heading\nThree
Heading\nFour
Heading\nFive
Heading\nSix
"
pofile = self.html2po(markup)
self.countunits(pofile, 6)
@@ -324,28 +324,28 @@ def test_headings_with_linebreaks(self):
self.compareunit(pofile, 6, "Heading Six")
def test_dt(self):
- """Test to see if the definition list title (dt) element is extracted"""
+ """Test to see if the definition list title (dt) element is extracted."""
self.check_single(
"
Definition List Item Title
",
"Definition List Item Title",
)
def test_dd(self):
- """Test to see if the definition list description (dd) element is extracted"""
+ """Test to see if the definition list description (dd) element is extracted."""
self.check_single(
"
Definition List Item Description
",
"Definition List Item Description",
)
def test_span(self):
- """test to check that we don't double extract a span item"""
+ """Test to check that we don't double extract a span item."""
self.check_single(
"
You are a Spanish sentence.
",
"You are a Spanish sentence.",
)
def test_ul(self):
- """Test to see if the list item
is extracted"""
+ """Test to see if the list item
is extracted."""
markup = "
Unordered One
Unordered Two
Ordered One
Ordered Two
"
pofile = self.html2po(markup)
self.countunits(pofile, 4)
@@ -355,7 +355,7 @@ def test_ul(self):
self.compareunit(pofile, 4, "Ordered Two")
def test_nested_lists(self):
- """Nested lists should be extracted correctly"""
+ """Nested lists should be extracted correctly."""
markup = """Nested lists
Vegetables
@@ -382,7 +382,7 @@ def test_nested_lists(self):
self.compareunit(pofile, 8, "Meat")
def test_duplicates(self):
- """check that we use the default style of msgctxt to disambiguate duplicate messages"""
+ """Check that we use the default style of msgctxt to disambiguate duplicate messages."""
markup = (
"
Duplicate
Duplicate
"
)
@@ -395,7 +395,7 @@ def test_duplicates(self):
assert pofile.units[2].getlocations() == ["None+html.body.p:1-42"]
def test_multiline_reflow(self):
- """check that we reflow multiline content to make it more readable for translators"""
+ """Check that we reflow multiline content to make it more readable for translators."""
self.check_single(
"""
South
Africa
""",
@@ -403,7 +403,7 @@ def test_multiline_reflow(self):
)
def test_nested_tags(self):
- """check that we can extract items within nested tags"""
+ """Check that we can extract items within nested tags."""
markup = "
Extract this
And this
"
pofile = self.html2po(markup)
self.countunits(pofile, 2)
@@ -472,7 +472,6 @@ def test_encoding_latin1(self):
def test_strip_html(self):
"""Ensure that unnecessary html is stripped from the resulting unit."""
-
htmlsource = """
@@ -535,7 +534,7 @@ def test_entityrefs_in_text(self):
)
def test_entityrefs_in_attributes(self):
- """Should convert html entityrefs in attribute values"""
+ """Should convert html entityrefs in attribute values."""
# it would be even nicer if " and ' could be preserved, but the automatic unescaping of
# attributes is deep inside html.HTMLParser.
self.check_single(
@@ -544,15 +543,14 @@ def test_entityrefs_in_attributes(self):
)
def test_charrefs(self):
- """Should extract html charrefs"""
+ """Should extract html charrefs."""
self.check_single(
"
’ ’
",
"\u2019 \u2019",
)
def test_php(self):
- """Test that PHP snippets don't interfere"""
-
+ """Test that PHP snippets don't interfere."""
# A simple string
self.check_phpsnippet("""=$phpvariable?>""")
@@ -566,7 +564,7 @@ def test_php(self):
)
def test_multiple_php(self):
- """Test multiple PHP snippets in a string to make sure they get restored properly"""
+ """Test multiple PHP snippets in a string to make sure they get restored properly."""
php1 = """=$phpvariable?>"""
php2 = """=($a < $b ? $foo : ($b > c ? $bar : $cat))?>"""
php3 = """ asdfghjklqwertyuiop1234567890!@#$%^&*()-=_+[]\\{}|;':",./<>? ?>"""
@@ -623,14 +621,14 @@ def test_php_multiline(self):
assert htmlresult == transsource
def test_php_with_embedded_html(self):
- """Should not consume HTML within processing instructions"""
+ """Should not consume HTML within processing instructions."""
self.check_single(
"
a
b
?> c",
"a
b
?> c",
)
def test_comments(self):
- """Test that HTML comments are converted to translator notes in output"""
+ """Test that HTML comments are converted to translator notes in output."""
pofile = self.html2po(
"
A paragraph.
",
keepcomments=True,
@@ -649,7 +647,7 @@ def test_attribute_without_value(self):
class TestHTML2POCommand(test_convert.TestConvertCommand, TestHTML2PO):
- """Tests running actual html2po commands on files"""
+ """Tests running actual html2po commands on files."""
convertmodule = html2po
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_ical2po.py b/tests/translate/convert/test_ical2po.py
index 446cce0af9..56beeb29fa 100644
--- a/tests/translate/convert/test_ical2po.py
+++ b/tests/translate/convert/test_ical2po.py
@@ -49,9 +49,7 @@ def test_no_translations(self):
BEGIN:VEVENT
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
output = self._convert_to_string(input_string, success_expected=False)
assert output == ""
@@ -70,9 +68,7 @@ def test_summary(self):
SUMMARY:Value
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-14 17:00:00+00:00
#: [uid1@example.com]SUMMARY
@@ -98,9 +94,7 @@ def test_description(self):
ORGANIZER;CN=John Doe:MAILTO:john.doe@example.com
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-14 17:00:00+00:00
#: [uid1@example.com]DESCRIPTION
@@ -126,9 +120,7 @@ def test_location(self):
ORGANIZER;CN=John Doe:MAILTO:john.doe@example.com
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-14 17:00:00+00:00
#: [uid1@example.com]LOCATION
@@ -154,9 +146,7 @@ def test_comment(self):
ORGANIZER;CN=John Doe:MAILTO:john.doe@example.com
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-14 17:00:00+00:00
#: [uid1@example.com]COMMENT
@@ -190,9 +180,7 @@ def test_no_template_duplicate_style(self):
SUMMARY:Value
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-14 17:00:00+00:00
#: [uid1@example.com]SUMMARY
@@ -241,9 +229,7 @@ def test_merge(self):
SUMMARY:Valor
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
template_string = """
BEGIN:VCALENDAR
VERSION:2.0
@@ -257,9 +243,7 @@ def test_merge(self):
SUMMARY:Value
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-18 17:00:00+00:00
#: [uid1@example.com]SUMMARY
@@ -285,9 +269,7 @@ def test_merge_misaligned_files(self):
SUMMARY:Valor
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
template_string = """
BEGIN:VCALENDAR
VERSION:2.0
@@ -301,9 +283,7 @@ def test_merge_misaligned_files(self):
SUMMARY:Value
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-18 17:00:00+00:00
#: [uid1@example.com]SUMMARY
@@ -329,9 +309,7 @@ def test_merge_blank_msgstr(self):
SUMMARY:Valor
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
template_string = """
BEGIN:VCALENDAR
VERSION:2.0
@@ -345,9 +323,7 @@ def test_merge_blank_msgstr(self):
SUMMARY:Value
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-18 17:00:00+00:00
#: [uid1@example.com]SUMMARY
@@ -383,9 +359,7 @@ def test_merge_duplicate_style(self):
SUMMARY:Valioso
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
template_string = """
BEGIN:VCALENDAR
VERSION:2.0
@@ -407,9 +381,7 @@ def test_merge_duplicate_style(self):
SUMMARY:Value
END:VEVENT
END:VCALENDAR
-""".replace(
- "\n", "\r\n"
- )
+""".replace("\n", "\r\n")
expected_output = """
#. Start date: 1997-07-14 17:00:00+00:00
#: [uid1@example.com]SUMMARY
@@ -450,7 +422,7 @@ def test_merge_duplicate_style(self):
class TestIcal2POCommand(test_convert.TestConvertCommand, TestIcal2PO):
- """Tests running actual ical2po commands on files"""
+ """Tests running actual ical2po commands on files."""
convertmodule = ical2po
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_idml2po.py b/tests/translate/convert/test_idml2po.py
index df6865290f..d00ae9941a 100644
--- a/tests/translate/convert/test_idml2po.py
+++ b/tests/translate/convert/test_idml2po.py
@@ -6,7 +6,7 @@
class TestIDML2POCommand(test_convert.TestConvertCommand):
- """Tests running actual idml2po commands on files"""
+ """Tests running actual idml2po commands on files."""
convertmodule = idml2po
diff --git a/tests/translate/convert/test_ini2po.py b/tests/translate/convert/test_ini2po.py
index 163b5f1602..e0df71b630 100644
--- a/tests/translate/convert/test_ini2po.py
+++ b/tests/translate/convert/test_ini2po.py
@@ -143,7 +143,7 @@ def test_dialects_inno(self):
class TestIni2POCommand(test_convert.TestConvertCommand, TestIni2PO):
- """Tests running actual ini2po commands on files"""
+ """Tests running actual ini2po commands on files."""
convertmodule = ini2po
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_json2po.py b/tests/translate/convert/test_json2po.py
index 2e4e546145..0856ac8190 100644
--- a/tests/translate/convert/test_json2po.py
+++ b/tests/translate/convert/test_json2po.py
@@ -1,3 +1,23 @@
+#
+# Copyright 2023 Stuart Prescott
+# Copyright 2023 Michal Čihař
+# Copyright 2024 gemmaro
+#
+# This file is part of translate.
+#
+# translate is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# translate is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see .
+
from io import BytesIO
from translate.convert import json2po
@@ -9,22 +29,21 @@
class TestJson2PO:
@staticmethod
def json2po(jsonsource, template=None, filter=None):
- """helper that converts json source to po source without requiring files"""
+ """Helper that converts json source to po source without requiring files."""
inputfile = BytesIO(jsonsource.encode())
inputjson = jsonl10n.JsonFile(inputfile, filter=filter)
convertor = json2po.json2po()
- outputpo = convertor.convert_store(inputjson)
- return outputpo
+ return convertor.convert_store(inputjson)
@staticmethod
def singleelement(storage):
- """checks that the pofile contains a single non-header element, and returns it"""
+ """Checks that the pofile contains a single non-header element, and returns it."""
print(bytes(storage))
assert len(storage.units) == 1
return storage.units[0]
def test_simple(self):
- """test the most basic json conversion"""
+ """Test the most basic json conversion."""
jsonsource = """{ "text": "A simple string"}"""
poexpected = """#: .text
msgid "A simple string"
@@ -33,8 +52,28 @@ def test_simple(self):
poresult = self.json2po(jsonsource)
assert str(poresult.units[1]) == poexpected
+ def test_three_same_keys(self):
+ """Test that we can handle JSON with three (or more) same keys."""
+ jsonsource = """{
+ "a": {
+ "x": "X"
+ },
+ "b": {
+ "x": "X"
+ },
+ "c": {
+ "x": "X"
+ }
+}
+"""
+ poresult = self.json2po(jsonsource)
+ assert len(poresult.units) == 4
+ assert poresult.units[1].msgctxt == ['".a.x"']
+ assert poresult.units[2].msgctxt == ['".b.x"']
+ assert poresult.units[3].msgctxt == ['".c.x"']
+
def test_filter(self):
- """test basic json conversion with filter option"""
+ """Test basic json conversion with filter option."""
jsonsource = """{ "text": "A simple string", "number": 42 }"""
poexpected = """#: .text
msgid "A simple string"
@@ -44,7 +83,7 @@ def test_filter(self):
assert str(poresult.units[1]) == poexpected
def test_miltiple_units(self):
- """test that we can handle json with multiple units"""
+ """Test that we can handle json with multiple units."""
jsonsource = """
{
"name": "John",
@@ -77,7 +116,7 @@ def test_miltiple_units(self):
class TestJson2POCommand(test_convert.TestConvertCommand, TestJson2PO):
- """Tests running actual json2po commands on files"""
+ """Tests running actual json2po commands on files."""
convertmodule = json2po
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_moz2po.py b/tests/translate/convert/test_moz2po.py
index 711bce5336..865015352e 100644
--- a/tests/translate/convert/test_moz2po.py
+++ b/tests/translate/convert/test_moz2po.py
@@ -8,7 +8,7 @@ class TestMoz2PO:
class TestMoz2POCommand(test_convert.TestConvertCommand, TestMoz2PO):
- """Tests running actual moz2po commands on files"""
+ """Tests running actual moz2po commands on files."""
convertmodule = moz2po
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_mozfunny2prop.py b/tests/translate/convert/test_mozfunny2prop.py
index 55d8d29354..e4be79084d 100644
--- a/tests/translate/convert/test_mozfunny2prop.py
+++ b/tests/translate/convert/test_mozfunny2prop.py
@@ -7,22 +7,18 @@
class TestInc2PO:
@staticmethod
def inc2po(incsource, inctemplate=None):
- """helper that converts .inc source to po source without requiring files"""
+ """Helper that converts .inc source to po source without requiring files."""
inputfile = BytesIO(incsource.encode())
- if inctemplate:
- templatefile = BytesIO(inctemplate.encode())
- else:
- templatefile = None
+ templatefile = BytesIO(inctemplate.encode()) if inctemplate else None
outputfile = BytesIO()
result = mozfunny2prop.inc2po(inputfile, outputfile, templatefile)
assert result == 1
outputpo = outputfile.getvalue()
- outputpofile = po.pofile(BytesIO(outputpo))
- return outputpofile
+ return po.pofile(BytesIO(outputpo))
@staticmethod
def singleelement(pofile):
- """checks that the pofile contains a single non-header element, and returns it"""
+ """Checks that the pofile contains a single non-header element, and returns it."""
assert len(pofile.units) == 2
assert pofile.units[0].isheader()
print(pofile)
@@ -30,13 +26,13 @@ def singleelement(pofile):
@staticmethod
def countelements(pofile):
- """counts the number of non-header entries"""
+ """Counts the number of non-header entries."""
assert pofile.units[0].isheader()
print(pofile)
return len(pofile.units) - 1
def test_simpleentry(self):
- """checks that a simple inc entry converts properly to a po entry"""
+ """Checks that a simple inc entry converts properly to a po entry."""
incsource = "#define MOZ_LANGPACK_CREATOR mozilla.org\n"
pofile = self.inc2po(incsource)
pounit = self.singleelement(pofile)
@@ -45,7 +41,7 @@ def test_simpleentry(self):
assert pounit.target == ""
def test_uncomment_contributors(self):
- """checks that the contributors entry is automatically uncommented"""
+ """Checks that the contributors entry is automatically uncommented."""
incsource = """# If non-English locales wish to credit multiple contributors, uncomment this
# variable definition and use the format specified.
# #define MOZ_LANGPACK_CONTRIBUTORS Joe SolonSuzy Solon"""
diff --git a/tests/translate/convert/test_mozlang2po.py b/tests/translate/convert/test_mozlang2po.py
index 9b41894a4c..424f4bda8b 100644
--- a/tests/translate/convert/test_mozlang2po.py
+++ b/tests/translate/convert/test_mozlang2po.py
@@ -143,7 +143,7 @@ def test_drop_duplicates(self):
class TestLang2POCommand(test_convert.TestConvertCommand, TestLang2PO):
- """Tests running actual lang2po commands on files"""
+ """Tests running actual lang2po commands on files."""
convertmodule = mozlang2po
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_ods2xliff.py b/tests/translate/convert/test_ods2xliff.py
index a45df7608f..4f4aadd3cc 100644
--- a/tests/translate/convert/test_ods2xliff.py
+++ b/tests/translate/convert/test_ods2xliff.py
@@ -6,7 +6,7 @@
class TestODF2XLIFFCommand(test_convert.TestConvertCommand):
- """Tests running actual odf2xliff commands on files"""
+ """Tests running actual odf2xliff commands on files."""
convertmodule = odf2xliff
diff --git a/tests/translate/convert/test_oo2po.py b/tests/translate/convert/test_oo2po.py
index 4b1e8097b4..564e069813 100644
--- a/tests/translate/convert/test_oo2po.py
+++ b/tests/translate/convert/test_oo2po.py
@@ -15,24 +15,22 @@ class TestOO2PO:
conversion_class = oo2po.oo2po
def convert(self, oosource, sourcelanguage="en-US", targetlanguage="af-ZA"):
- """helper that converts oo source to po source without requiring files"""
+ """Helper that converts oo source to po source without requiring files."""
if isinstance(oosource, str):
oosource = oosource.encode("utf-8")
inputoo = oo.oofile(oosource)
convertor = self.conversion_class(sourcelanguage, targetlanguage)
- outputpo = convertor.convertstore(inputoo)
- return outputpo
+ return convertor.convertstore(inputoo)
@staticmethod
def singleelement(pofile):
- """checks that the pofile contains a single non-header element, and returns it"""
+ """Checks that the pofile contains a single non-header element, and returns it."""
if isinstance(pofile, poheader):
assert len(pofile.units) == 2
assert pofile.units[0].isheader()
return pofile.units[1]
- else:
- assert len(pofile.units) == 1
- return pofile.units[0]
+ assert len(pofile.units) == 1
+ return pofile.units[0]
def roundtripstring(self, filename, entitystring):
"""
@@ -40,7 +38,6 @@ def roundtripstring(self, filename, entitystring):
Return the string once it has been through all the conversions.
"""
-
ootemplate = r"helpcontent2 %s 0 help par_id3150670 35 0 en-US %s 2002-02-02 02:02:02"
oosource = (ootemplate % (filename, entitystring)).encode("utf-8")
@@ -75,7 +72,7 @@ def check_roundtrip(self, filename, text):
assert self.roundtripstring(filename, text) == text
def test_simpleentity(self):
- """checks that a simple oo entry converts properly to a po entry"""
+ """Checks that a simple oo entry converts properly to a po entry."""
oosource = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 en-US Character 20050924 09:13:58"
pofile = self.convert(oosource)
pounit = self.singleelement(pofile)
@@ -83,7 +80,7 @@ def test_simpleentity(self):
assert pounit.target == ""
def test_escapes(self):
- """checks that a simple oo entry converts escapes properly to a po entry"""
+ """Checks that a simple oo entry converts escapes properly to a po entry."""
oosource = r"wizards source\formwizard\dbwizres.src 0 string RID_DB_FORM_WIZARD_START + 19 0 en-US Newline \n Newline Tab \t Tab CR \r CR 20050924 09:13:58"
pofile = self.convert(oosource)
pounit = self.singleelement(pofile)
@@ -122,7 +119,7 @@ def test_roundtrip_escape(self):
)
def test_roundtrip_whitespaceonly(self):
- """check items that are only special instances of whitespce"""
+        """Check items that are only special instances of whitespace."""
self.check_roundtrip("choose_chart_type.xhp", r" ")
self.check_roundtrip("choose_chart_type.xhp", b"\xc2\xa0".decode("utf-8"))
@@ -135,7 +132,7 @@ def test_double_escapes(self):
assert pounit.source == r"\<"
def test_escapes_helpcontent2(self):
- """checks that a helpcontent2 entry converts escapes properly to a po entry"""
+ """Checks that a helpcontent2 entry converts escapes properly to a po entry."""
oosource = r"helpcontent2 source\text\smath\guide\parentheses.xhp 0 help par_id3150344 4 0 en-US size *2 \\langle x \\rangle 2002-02-02 02:02:02"
pofile = self.convert(oosource)
pounit = self.singleelement(pofile)
@@ -144,7 +141,7 @@ def test_escapes_helpcontent2(self):
assert pounit.source == r"size *2 \langle x \rangle"
def test_msgid_bug_error_address(self):
- """tests the we have the correct url for reporting msgid bugs"""
+        """Tests that we have the correct URL for reporting msgid bugs."""
oosource = r"wizards source\formwizard\dbwizres.src 0 string RID_DB_FORM_WIZARD_START + 19 0 en-US Newline \n Newline Tab \t Tab CR \r CR 20050924 09:13:58"
pofile = self.convert(oosource)
assert pofile.units[0].isheader()
@@ -161,7 +158,7 @@ def test_msgid_bug_error_address(self):
}
def test_x_comment_inclusion(self):
- """test that we can merge x-comment language entries into comment sections of the PO file"""
+ """Test that we can merge x-comment language entries into comment sections of the PO file."""
en_USsource = r"wizards source\formwizard\dbwizres.src 0 string RID_DB_FORM_WIZARD_START + 19 0 en-US Text Quickhelp Title 20050924 09:13:58"
xcommentsource = r"wizards source\formwizard\dbwizres.src 0 string RID_DB_FORM_WIZARD_START + 19 0 x-comment %s %s %s 20050924 09:13:58"
# Real comment
@@ -170,10 +167,7 @@ def test_x_comment_inclusion(self):
en_USsource + "\n" + xcommentsource % (comment, comment, comment)
)
pofile = self.convert(commentsource)
- if isinstance(pofile, poheader):
- units = pofile.units[1:]
- else:
- units = pofile.units
+ units = pofile.units[1:] if isinstance(pofile, poheader) else pofile.units
textunit = units[0]
assert textunit.source == "Text"
assert comment in textunit.getnotes("developer")
@@ -189,10 +183,7 @@ def test_x_comment_inclusion(self):
en_USsource + "\n" + xcommentsource % (comment, comment, comment)
)
pofile = self.convert(commentsource)
- if isinstance(pofile, poheader):
- units = pofile.units[1:]
- else:
- units = pofile.units
+ units = pofile.units[1:] if isinstance(pofile, poheader) else pofile.units
textunit = units[0]
assert textunit.source == "Text"
assert textunit.getnotes("developer") == ""
@@ -205,7 +196,7 @@ def test_x_comment_inclusion(self):
class TestOO2POCommand(test_convert.TestConvertCommand, TestOO2PO):
- """Tests running actual oo2po commands on files"""
+ """Tests running actual oo2po commands on files."""
convertmodule = oo2po
@@ -228,7 +219,7 @@ def test_preserve_filename(self):
assert oofile.filename.endswith("snippet.sdf")
def test_simple_pot(self):
- """tests the simplest possible conversion to a pot file"""
+ """Tests the simplest possible conversion to a pot file."""
oosource = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 en-US Character 20050924 09:13:58"
self.create_testfile("simple.oo", oosource)
self.run_command("simple.oo", "simple.pot", pot=True, nonrecursiveinput=True)
@@ -238,7 +229,7 @@ def test_simple_pot(self):
assert poelement.target == ""
def test_simple_po(self):
- """tests the simplest possible conversion to a po file"""
+ """Tests the simplest possible conversion to a po file."""
oosource1 = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 en-US Character 20050924 09:13:58"
oosource2 = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 ku Karakter 20050924 09:13:58"
self.create_testfile("simple.oo", oosource1 + "\n" + oosource2)
@@ -249,14 +240,14 @@ def test_simple_po(self):
assert poelement.target == "Karakter"
def test_onefile_nonrecursive(self):
- """tests the --multifile=onefile option and make sure it doesn't produce a directory"""
+ """Tests the --multifile=onefile option and make sure it doesn't produce a directory."""
oosource = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 en-US Character 20050924 09:13:58"
self.create_testfile("simple.oo", oosource)
self.run_command("simple.oo", "simple.pot", pot=True, multifile="onefile")
assert os.path.isfile(self.get_testfilename("simple.pot"))
def test_remove_duplicates(self):
- """test that removing of duplicates works correctly (bug 171)"""
+ """Test that removing of duplicates works correctly (bug 171)."""
oosource = r"""
sd source\ui\animations\SlideTransitionPane.src 0 checkbox DLG_SLIDE_TRANSITION_PANE CB_AUTO_PREVIEW HID_SD_SLIDETRANSITIONPANE_CB_AUTO_PREVIEW 1 en-US Automatic preview 20060725 03:26:42
sd source\ui\animations\AnimationSchemesPane.src 0 checkbox DLG_ANIMATION_SCHEMES_PANE CB_AUTO_PREVIEW HID_SD_ANIMATIONSCHEMESPANE_CB_AUTO_PREVIEW 1 en-US Automatic preview 20060725 03:26:42
diff --git a/tests/translate/convert/test_oo2xliff.py b/tests/translate/convert/test_oo2xliff.py
index f9df734d15..ca925952b5 100644
--- a/tests/translate/convert/test_oo2xliff.py
+++ b/tests/translate/convert/test_oo2xliff.py
@@ -16,7 +16,7 @@ def test_msgid_bug_error_address(self):
class TestOO2POCommand(test_convert.TestConvertCommand, TestOO2XLIFF):
- """Tests running actual oo2xliff commands on files"""
+ """Tests running actual oo2xliff commands on files."""
convertmodule = oo2xliff
@@ -38,7 +38,7 @@ def test_preserve_filename(self):
assert oofile.filename.endswith("snippet.sdf")
def test_simple_xlf(self):
- """tests the simplest possible conversion to a xlf file"""
+ """Tests the simplest possible conversion to a xlf file."""
oosource = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 en-US Character 20050924 09:13:58"
self.create_testfile("simple.oo", oosource)
self.run_command("simple.oo", "simple.xlf", lang="ku", nonrecursiveinput=True)
@@ -48,7 +48,7 @@ def test_simple_xlf(self):
assert poelement.target == ""
def test_simple_po(self):
- """tests the simplest possible conversion to a po file"""
+ """Tests the simplest possible conversion to a po file."""
oosource1 = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 en-US Character 20050924 09:13:58"
oosource2 = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 ku Karakter 20050924 09:13:58"
self.create_testfile("simple.oo", oosource1 + "\n" + oosource2)
@@ -59,7 +59,7 @@ def test_simple_po(self):
assert poelement.target == "Karakter"
def test_onefile_nonrecursive(self):
- """tests the --multifile=onefile option and make sure it doesn't produce a directory"""
+ """Tests the --multifile=onefile option and make sure it doesn't produce a directory."""
oosource = r"svx source\dialog\numpages.src 0 string RID_SVXPAGE_NUM_OPTIONS STR_BULLET 0 en-US Character 20050924 09:13:58"
self.create_testfile("simple.oo", oosource)
self.run_command("simple.oo", "simple.xlf", lang="ku", multifile="onefile")
diff --git a/tests/translate/convert/test_php2po.py b/tests/translate/convert/test_php2po.py
index 8dd99ca869..117b9a937a 100644
--- a/tests/translate/convert/test_php2po.py
+++ b/tests/translate/convert/test_php2po.py
@@ -9,7 +9,7 @@
class TestPhp2PO:
@staticmethod
def php2po(phpsource, phptemplate=None):
- """helper that converts .php source to po source without requiring files"""
+ """Helper that converts .php source to po source without requiring files."""
inputfile = BytesIO(phpsource.encode())
output_file = BytesIO()
templatefile = None
@@ -21,7 +21,7 @@ def php2po(phpsource, phptemplate=None):
@staticmethod
def convertphp(phpsource, template=None, expected=1):
- """call run_converter, return the outputfile"""
+ """Call run_converter, return the outputfile."""
inputfile = BytesIO(phpsource.encode())
outputfile = BytesIO()
templatefile = None
@@ -32,7 +32,7 @@ def convertphp(phpsource, template=None, expected=1):
@staticmethod
def singleelement(pofile):
- """checks that the pofile contains a single non-header element, and returns it"""
+ """Checks that the pofile contains a single non-header element, and returns it."""
assert len(pofile.units) == 2
assert pofile.units[0].isheader()
print(pofile)
@@ -40,13 +40,13 @@ def singleelement(pofile):
@staticmethod
def countelements(pofile):
- """counts the number of non-header entries"""
+ """Counts the number of non-header entries."""
assert pofile.units[0].isheader()
print(pofile)
return len(pofile.units) - 1
def test_simpleentry(self):
- """checks that a simple php entry converts properly to a po entry"""
+ """Checks that a simple php entry converts properly to a po entry."""
phpsource = """$_LANG['simple'] = 'entry';"""
pofile = self.php2po(phpsource)
pounit = self.singleelement(pofile)
@@ -54,7 +54,7 @@ def test_simpleentry(self):
assert pounit.target == ""
def test_convertphp(self):
- """checks that the convertphp function is working"""
+ """Checks that the convertphp function is working."""
phpsource = """$_LANG['simple'] = 'entry';"""
posource = self.convertphp(phpsource)
pofile = po.pofile(BytesIO(posource))
@@ -63,7 +63,7 @@ def test_convertphp(self):
assert pounit.target == ""
def test_convertphptemplate(self):
- """checks that the convertphp function is working with template"""
+ """Checks that the convertphp function is working with template."""
phpsource = """$_LANG['simple'] = 'entry';"""
phptemplate = """$_LANG['simple'] = 'source';"""
posource = self.convertphp(phpsource, phptemplate)
@@ -73,7 +73,7 @@ def test_convertphptemplate(self):
assert pounit.target == "entry"
def test_convertphpmissing(self):
- """checks that the convertphp function is working with missing key"""
+ """Checks that the convertphp function is working with missing key."""
phpsource = """$_LANG['simple'] = 'entry';"""
phptemplate = """$_LANG['missing'] = 'source';"""
posource = self.convertphp(phpsource, phptemplate)
@@ -83,7 +83,7 @@ def test_convertphpmissing(self):
assert pounit.target == ""
def test_convertphpempty(self):
- """checks that the convertphp function is working with empty template"""
+ """Checks that the convertphp function is working with empty template."""
phpsource = ""
phptemplate = ""
posource = self.convertphp(phpsource, phptemplate, 0)
@@ -91,17 +91,17 @@ def test_convertphpempty(self):
assert len(pofile.units) == 0
def test_unicode(self):
- """checks that unicode entries convert properly"""
- unistring = "Norsk bokm\u00E5l"
+ """Checks that unicode entries convert properly."""
+ unistring = "Norsk bokm\u00e5l"
phpsource = """$lang['nb'] = '%s';""" % unistring
pofile = self.php2po(phpsource)
pounit = self.singleelement(pofile)
print(repr(pofile.units[0].target))
print(repr(pounit.source))
- assert pounit.source == "Norsk bokm\u00E5l"
+ assert pounit.source == "Norsk bokm\u00e5l"
def test_multiline(self):
- """checks that multiline enties can be parsed"""
+        """Checks that multiline entries can be parsed."""
phpsource = r"""$lang['5093'] = 'Unable to connect to your IMAP server. You may have exceeded the maximum number
of connections to this server. If so, use the Advanced IMAP Server Settings dialog to
reduce the number of cached connections.';"""
@@ -110,7 +110,7 @@ def test_multiline(self):
assert self.countelements(pofile) == 1
def test_comments_before(self):
- """test to ensure that we take comments from .php and place them in .po"""
+ """Test to ensure that we take comments from .php and place them in .po."""
phpsource = """/* Comment */
$lang['prefPanel-smime'] = 'Security';"""
pofile = self.php2po(phpsource)
@@ -119,7 +119,7 @@ def test_comments_before(self):
# TODO write test for inline comments and check for // comments that precede an entry
def test_emptyentry(self):
- """checks that empty definitions survives into po file"""
+        """Checks that empty definitions survive into po file."""
phpsource = """/* comment */\n$lang['credit'] = '';"""
pofile = self.php2po(phpsource)
pounit = self.singleelement(pofile)
@@ -138,7 +138,7 @@ def test_hash_comment_with_equals(self):
assert pounit.source == "stringy"
def test_emptyentry_translated(self):
- """checks that if we translate an empty definition it makes it into the PO"""
+ """Checks that if we translate an empty definition it makes it into the PO."""
phptemplate = """$lang['credit'] = '';"""
phpsource = """$lang['credit'] = 'Translators Names';"""
pofile = self.php2po(phpsource, phptemplate)
@@ -148,7 +148,7 @@ def test_emptyentry_translated(self):
assert pounit.target == "Translators Names"
def test_newlines_in_value(self):
- """check that we can carry newlines that appear in the entry value into the PO"""
+ """Check that we can carry newlines that appear in the entry value into the PO."""
# Single quotes - \n is not a newline
phpsource = r"""$lang['name'] = 'value1\nvalue2';"""
pofile = self.php2po(phpsource)
@@ -161,7 +161,7 @@ def test_newlines_in_value(self):
assert unit.source == "value1\nvalue2"
def test_spaces_in_name(self):
- """checks that if we have spaces in the name we create a good PO with no spaces"""
+ """Checks that if we have spaces in the name we create a good PO with no spaces."""
phptemplate = """$lang[ 'credit' ] = 'Something';"""
phpsource = """$lang[ 'credit' ] = 'n Ding';"""
pofile = self.php2po(phpsource, phptemplate)
@@ -274,7 +274,7 @@ def test_unnamed_nested_arrays(self):
class TestPhp2POCommand(test_convert.TestConvertCommand, TestPhp2PO):
- """Tests running actual php2po commands on files"""
+ """Tests running actual php2po commands on files."""
convertmodule = php2po
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_po2csv.py b/tests/translate/convert/test_po2csv.py
index c1156355c9..807efc5d0f 100644
--- a/tests/translate/convert/test_po2csv.py
+++ b/tests/translate/convert/test_po2csv.py
@@ -10,7 +10,7 @@
class TestPO2CSV:
@staticmethod
def po2csv(posource):
- """helper that converts po source to csv source without requiring files"""
+ """Helper that converts po source to csv source without requiring files."""
inputfile = BytesIO(posource.encode())
inputpo = po.pofile(inputfile)
convertor = po2csv.po2csv()
@@ -18,7 +18,7 @@ def po2csv(posource):
@staticmethod
def csv2po(csvsource, template=None):
- """helper that converts csv source to po source without requiring files"""
+ """Helper that converts csv source to po source without requiring files."""
inputfile = BytesIO(csvsource)
inputcsv = csvl10n.csvfile(inputfile)
if template:
@@ -31,12 +31,12 @@ def csv2po(csvsource, template=None):
@staticmethod
def singleelement(storage):
- """checks that the pofile contains a single non-header element, and returns it"""
+ """Checks that the pofile contains a single non-header element, and returns it."""
assert headerless_len(storage.units) == 1
return first_translatable(storage)
def test_simpleentity(self):
- """checks that a simple csv entry definition converts properly to a po entry"""
+ """Checks that a simple csv entry definition converts properly to a po entry."""
minipo = r'''#: term.cpp
msgid "Term"
msgstr "asdf"'''
@@ -47,7 +47,7 @@ def test_simpleentity(self):
assert unit.target == "asdf"
def test_multiline(self):
- """tests multiline po entries"""
+ """Tests multiline po entries."""
minipo = r'''msgid "First part "
"and extra"
msgstr "Eerste deel "
@@ -58,7 +58,7 @@ def test_multiline(self):
assert unit.target == "Eerste deel en ekstra"
def test_escapednewlines(self):
- """Test the escaping of newlines"""
+ """Test the escaping of newlines."""
minipo = r"""msgid "First line\nSecond line"
msgstr "Eerste lyn\nTweede lyn"
"""
@@ -72,7 +72,7 @@ def test_escapednewlines(self):
assert unit.target == "Eerste lyn\nTweede lyn"
def test_escapedtabs(self):
- """Test the escaping of tabs"""
+ """Test the escaping of tabs."""
minipo = r"""msgid "First column\tSecond column"
msgstr "Eerste kolom\tTweede kolom"
"""
@@ -86,7 +86,7 @@ def test_escapedtabs(self):
)
def test_escapedquotes(self):
- """Test the escaping of quotes (and slash)"""
+ """Test the escaping of quotes (and slash)."""
minipo = r"""msgid "Hello \"Everyone\""
msgstr "Good day \"All\""
@@ -98,7 +98,7 @@ def test_escapedquotes(self):
assert csvfile.findunit('Use \\".').target == 'Gebruik \\".'
def test_escapedescape(self):
- """Test the escaping of pure escapes is unaffected"""
+ """Test the escaping of pure escapes is unaffected."""
minipo = r"""msgid "Find\\Options"
msgstr "Vind\\Opsies"
"""
@@ -108,7 +108,7 @@ def test_escapedescape(self):
assert csvfile.findunit(r"Find\Options").target == r"Vind\Opsies"
def test_singlequotes(self):
- """Tests that single quotes are preserved correctly"""
+ """Tests that single quotes are preserved correctly."""
minipo = """msgid "source 'source'"\nmsgstr "target 'target'"\n"""
csvfile = self.po2csv(minipo)
print(bytes(csvfile))
@@ -121,7 +121,7 @@ def test_singlequotes(self):
# TODO check that we escape on writing not in the internal representation
def test_empties(self):
- """Tests that things keep working with empty entries"""
+ """Tests that things keep working with empty entries."""
minipo = 'msgid "Source"\nmsgstr ""\n\nmsgid ""\nmsgstr ""'
csvfile = self.po2csv(minipo)
assert csvfile.findunit("Source") is not None
@@ -129,7 +129,7 @@ def test_empties(self):
assert headerless_len(csvfile.units) == 1
def test_kdecomments(self):
- """test that we don't carry KDE comments to CSV"""
+ """Test that we don't carry KDE comments to CSV."""
minipo = '#: simple.c\nmsgid "_: KDE comment\\n"\n"Same"\nmsgstr "Same"\n'
csvfile = self.po2csv(minipo)
unit = self.singleelement(csvfile)
@@ -138,7 +138,7 @@ def test_kdecomments(self):
class TestPO2CSVCommand(test_convert.TestConvertCommand, TestPO2CSV):
- """Tests running actual po2csv commands on files"""
+ """Tests running actual po2csv commands on files."""
convertmodule = po2csv
diff --git a/tests/translate/convert/test_po2dtd.py b/tests/translate/convert/test_po2dtd.py
index acf1b7120f..c0b5c9ed43 100644
--- a/tests/translate/convert/test_po2dtd.py
+++ b/tests/translate/convert/test_po2dtd.py
@@ -12,7 +12,7 @@
class TestPO2DTD:
@staticmethod
def po2dtd(posource, remove_untranslated=False):
- """helper that converts po source to dtd source without requiring files"""
+ """Helper that converts po source to dtd source without requiring files."""
inputfile = BytesIO(posource.encode())
inputpo = po.pofile(inputfile)
convertor = po2dtd.po2dtd(remove_untranslated=remove_untranslated)
@@ -20,7 +20,7 @@ def po2dtd(posource, remove_untranslated=False):
@staticmethod
def merge2dtd(dtdsource, posource):
- """helper that merges po translations to dtd source without requiring files"""
+ """Helper that merges po translations to dtd source without requiring files."""
inputfile = BytesIO(posource.encode())
inputpo = po.pofile(inputfile)
templatefile = BytesIO(dtdsource.encode())
@@ -30,7 +30,7 @@ def merge2dtd(dtdsource, posource):
@staticmethod
def convertdtd(posource, dtdtemplate, remove_untranslated=False):
- """helper to exercise the command line function"""
+ """Helper to exercise the command line function."""
inputfile = BytesIO(posource.encode())
outputfile = BytesIO()
templatefile = BytesIO(dtdtemplate.encode())
@@ -41,7 +41,7 @@ def convertdtd(posource, dtdtemplate, remove_untranslated=False):
@staticmethod
def roundtripsource(dtdsource):
- """converts dtd source to po and back again, returning the resulting source"""
+ """Converts dtd source to po and back again, returning the resulting source."""
dtdinputfile = BytesIO(dtdsource.encode())
dtdinputfile2 = BytesIO(dtdsource.encode())
pooutputfile = BytesIO()
@@ -58,15 +58,17 @@ def roundtripsource(dtdsource):
return dtdresult
def roundtripstring(self, entitystring):
- """Just takes the contents of a ENTITY definition (with quotes) and does a roundtrip on that"""
+ """Just takes the contents of a ENTITY definition (with quotes) and does a roundtrip on that."""
dtdintro, dtdoutro = "\n"
dtdsource = dtdintro + entitystring + dtdoutro
dtdresult = self.roundtripsource(dtdsource)
- assert dtdresult.startswith(dtdintro) and dtdresult.endswith(dtdoutro)
+ assert dtdresult.startswith(dtdintro)
+ assert dtdresult.endswith(dtdoutro)
return dtdresult[len(dtdintro) : -len(dtdoutro)]
def check_roundtrip(self, dtdsource, dtdcompare=None):
- """Checks that the round-tripped string is the same as dtdcompare.
+ """
+ Checks that the round-tripped string is the same as dtdcompare.
If no dtdcompare string is provided then the round-tripped string is
compared with the original string.
@@ -83,14 +85,14 @@ def check_roundtrip(self, dtdsource, dtdcompare=None):
assert self.roundtripstring(dtdsource) == dtdcompare
def test_joinlines(self):
- """tests that po lines are joined seamlessly (bug 16)"""
+ """Tests that po lines are joined seamlessly (bug 16)."""
multilinepo = """#: pref.menuPath\nmsgid ""\n"Tools > Options"\n"span>"\nmsgstr ""\n"""
dtdfile = self.po2dtd(multilinepo)
dtdsource = bytes(dtdfile)
assert b"" in dtdsource
def test_escapedstr(self):
- r"""tests that \n in msgstr is escaped correctly in dtd"""
+ r"""Tests that \n in msgstr is escaped correctly in dtd."""
multilinepo = (
"""#: pref.menuPath\nmsgid "Hello\\nEveryone"\nmsgstr "Good day\\nAll"\n"""
)
@@ -99,7 +101,7 @@ def test_escapedstr(self):
assert b"Good day\nAll" in dtdsource
def test_missingaccesskey(self):
- """tests that proper warnings are given if access key is missing"""
+ """Tests that proper warnings are given if access key is missing."""
simplepo = """#: simple.label
#: simple.accesskey
msgid "Simple &String"
@@ -112,7 +114,7 @@ def test_missingaccesskey(self):
self.merge2dtd(simpledtd, simplepo)
def test_accesskeycase(self):
- """tests that access keys come out with the same case as the original, regardless"""
+ """Tests that access keys come out with the same case as the original, regardless."""
simplepo_template = (
"""#: simple.label\n#: simple.accesskey\nmsgid "%s"\nmsgstr "%s"\n"""
)
@@ -147,7 +149,7 @@ def test_accesskeycase(self):
assert accel == target_akey
def test_accesskey_types(self):
- """tests that we can detect the various styles of accesskey"""
+ """Tests that we can detect the various styles of accesskey."""
simplepo_template = (
"""#: simple.%s\n#: simple.%s\nmsgid "&File"\nmsgstr "F&aele"\n"""
)
@@ -166,7 +168,7 @@ def test_accesskey_types(self):
)
def test_accesskey_missing(self):
- """tests that missing ampersands use the source accesskey"""
+ """Tests that missing ampersands use the source accesskey."""
po_snippet = r"""#: key.label
#: key.accesskey
msgid "&Search"
@@ -183,7 +185,7 @@ def test_accesskey_missing(self):
def test_accesskey_and_amp_case_no_accesskey(self):
"""
- tests that accesskey and & can work together
+        Tests that accesskey and &amp; can work together.
If missing we use the source accesskey
"""
@@ -202,7 +204,7 @@ def test_accesskey_and_amp_case_no_accesskey(self):
def test_accesskey_and_amp_source_no_amp_in_target(self):
"""
- tests that accesskey and & can work together
+        Tests that accesskey and &amp; can work together.
If present we use the target accesskey
"""
@@ -221,7 +223,7 @@ def test_accesskey_and_amp_source_no_amp_in_target(self):
def test_accesskey_and_amp_case_both_amp_and_accesskey(self):
"""
- tests that accesskey and & can work together
+        Tests that accesskey and &amp; can work together.
If present both & (and) and a marker then we use the correct source
accesskey
@@ -241,7 +243,7 @@ def test_accesskey_and_amp_case_both_amp_and_accesskey(self):
def test_accesskey_and_amp_case_amp_no_accesskey(self):
"""
- tests that accesskey and & can work together
+        Tests that accesskey and &amp; can work together.
If present both & (and) and a no marker then we use the correct source
accesskey
@@ -260,14 +262,14 @@ def test_accesskey_and_amp_case_amp_no_accesskey(self):
assert '"L"' in dtdsource
def test_entities_two(self):
- """test the error ouput when we find two entities"""
+ """Test the error ouput when we find two entities."""
simplestring = """#: simple.string second.string\nmsgid "Simple String"\nmsgstr "Dimpled Ring"\n"""
dtdfile = self.po2dtd(simplestring)
dtdsource = bytes(dtdfile)
assert b"CONVERSION NOTE - multiple entities" in dtdsource
def test_entities(self):
- """tests that entities are correctly idnetified in the dtd"""
+        """Tests that entities are correctly identified in the dtd."""
simplestring = (
"""#: simple.string\nmsgid "Simple String"\nmsgstr "Dimpled Ring"\n"""
)
@@ -276,14 +278,14 @@ def test_entities(self):
assert dtdsource.startswith(b"")
def test_retains_hashprefix(self):
- """tests that hash prefixes in the dtd are retained"""
+ """Tests that hash prefixes in the dtd are retained."""
hashpo = """#: lang.version\nmsgid "__MOZILLA_LOCALE_VERSION__"\nmsgstr "__MOZILLA_LOCALE_VERSION__"\n"""
hashdtd = '#expand \n'
dtdfile = self.merge2dtd(hashdtd, hashpo)
@@ -291,7 +293,7 @@ def test_retains_hashprefix(self):
assert regendtd == hashdtd
def test_convertdtd(self):
- """checks that the convertdtd function is working"""
+ """Checks that the convertdtd function is working."""
posource = """#: simple.label\n#: simple.accesskey\nmsgid "Simple &String"\nmsgstr "Dimpled &Ring"\n"""
dtdtemplate = """\n\n"""
dtdexpected = """\n\n"""
@@ -300,7 +302,7 @@ def test_convertdtd(self):
assert newdtd == dtdexpected
def test_untranslated_with_template(self):
- """test removing of untranslated entries in redtd"""
+ """Test removing of untranslated entries in redtd."""
posource = """#: simple.label
msgid "Simple string"
msgstr "Dimpled ring"
@@ -333,7 +335,7 @@ def test_untranslated_with_template(self):
assert newdtd == dtdexpected
def test_untranslated_without_template(self):
- """test removing of untranslated entries in po2dtd"""
+ """Test removing of untranslated entries in po2dtd."""
posource = """#: simple.label
msgid "Simple string"
msgstr "Dimpled ring"
@@ -359,7 +361,7 @@ def test_untranslated_without_template(self):
assert bytes(newdtd).decode("utf-8") == dtdexpected
def test_blank_source(self):
- """test removing of untranslated entries where source is blank"""
+ """Test removing of untranslated entries where source is blank."""
posource = """#: simple.label
msgid "Simple string"
msgstr "Dimpled ring"
@@ -394,7 +396,7 @@ def test_blank_source(self):
assert bytes(newdtd_no_template).decode("utf-8") == dtdexpected_no_template
def test_newlines_escapes(self):
- r"""check that we can handle a \n in the PO file"""
+ r"""Check that we can handle a \n in the PO file."""
posource = """#: simple.label\n#: simple.accesskey\nmsgid "A hard coded newline.\\n"\nmsgstr "Hart gekoeerde nuwe lyne\\n"\n"""
dtdtemplate = '\n'
dtdexpected = """\n"""
@@ -403,17 +405,18 @@ def test_newlines_escapes(self):
assert bytes(dtdfile).decode("utf-8") == dtdexpected
def test_roundtrip_simple(self):
- """checks that simple strings make it through a dtd->po->dtd roundtrip"""
+ """Checks that simple strings make it through a dtd->po->dtd roundtrip."""
self.check_roundtrip('"Hello"')
self.check_roundtrip('"Hello Everybody"')
def test_roundtrip_escape(self):
- """checks that escapes in strings make it through a dtd->po->dtd roundtrip"""
+ """Checks that escapes in strings make it through a dtd->po->dtd roundtrip."""
self.check_roundtrip(r'"Simple Escape \ \n \\ \: \t \r "')
self.check_roundtrip(r'"End Line Escape \"')
def test_roundtrip_quotes(self):
- """Checks that quotes make it through a DTD->PO->DTD roundtrip.
+ """
+ Checks that quotes make it through a DTD->PO->DTD roundtrip.
Quotes may be escaped or not.
"""
@@ -459,14 +462,15 @@ def test_roundtrip_quotes(self):
)
def test_roundtrip_amp(self):
- """Checks that quotes make it through a DTD->PO->DTD roundtrip.
+ """
+ Checks that quotes make it through a DTD->PO->DTD roundtrip.
Quotes may be escaped or not.
"""
self.check_roundtrip('"Colour & Light"')
def test_merging_entries_with_spaces_removed(self):
- """dtd2po removes pretty printed spaces, this tests that we can merge this back into the pretty printed dtd"""
+ """dtd2po removes pretty printed spaces, this tests that we can merge this back into the pretty printed dtd."""
posource = """#: simple.label\nmsgid "First line then "\n"next lines."\nmsgstr "Eerste lyne en dan volgende lyne."\n"""
dtdtemplate = (
'\n'
dtdexpected = '\n'
@@ -487,7 +491,7 @@ def test_preserving_spaces(self):
assert bytes(dtdfile).decode("utf-8") == dtdexpected
def test_preserving_spaces_after_value(self):
- """Preserve spaces after value. Bug 1662"""
+ """Preserve spaces after value. Bug 1662."""
# Space between value and >
posource = """#: simple.label\nmsgid "One"\nmsgstr "Een"\n"""
dtdtemplate = '\n'
@@ -503,7 +507,7 @@ def test_preserving_spaces_after_value(self):
assert bytes(dtdfile).decode("utf-8") == dtdexpected
def test_comments(self):
- """test that we preserve comments, bug 351"""
+ """Test that we preserve comments, bug 351."""
posource = '''#: name\nmsgid "Text"\nmsgstr "Teks"'''
dtdtemplate = """\n\n"""
dtdfile = self.merge2dtd(dtdtemplate % "Text", posource)
@@ -511,7 +515,7 @@ def test_comments(self):
assert bytes(dtdfile).decode("utf-8") == dtdtemplate % "Teks"
def test_duplicates(self):
- """test that we convert duplicates back correctly to their respective entries."""
+ """Test that we convert duplicates back correctly to their respective entries."""
posource = r"""#: bookmarksMenu.label bookmarksMenu.accesskey
msgctxt "bookmarksMenu.label bookmarksMenu.accesskey"
msgid "&Bookmarks"
@@ -543,7 +547,7 @@ def test_duplicates(self):
class TestPO2DTDCommand(test_convert.TestConvertCommand, TestPO2DTD):
- """Tests running actual po2dtd commands on files"""
+ """Tests running actual po2dtd commands on files."""
convertmodule = po2dtd
defaultoptions = {"progress": "none"}
diff --git a/tests/translate/convert/test_po2flatxml.py b/tests/translate/convert/test_po2flatxml.py
index 23b8c20cbc..d086785aae 100644
--- a/tests/translate/convert/test_po2flatxml.py
+++ b/tests/translate/convert/test_po2flatxml.py
@@ -1,4 +1,4 @@
-"""Tests converting Gettext PO localization files to flat XML files"""
+"""Tests converting Gettext PO localization files to flat XML files."""
from io import BytesIO
@@ -17,7 +17,7 @@ class TestPO2FlatXML:
@staticmethod
def _convert(postring, templatestring=None, **kwargs):
- """Helper that converts po source to xml target without requiring files"""
+ """Helper that converts po source to xml target without requiring files."""
inputfile = BytesIO(postring.encode())
templatefile = None
if templatestring:
@@ -80,7 +80,8 @@ def test_key(self):
assert actual == expected
def test_default_namespace(self):
- """Test a conversion with a default namespace.
+ """
+ Test a conversion with a default namespace.
This conversion requires a template that specifies the namespace
as default namespace; otherwise it will be generated.
@@ -132,7 +133,7 @@ def test_noindent(self):
class TestPO2FlatXMLCommand(test_convert.TestConvertCommand):
- """Tests running actual po2flatxml commands on files"""
+ """Tests running actual po2flatxml commands on files."""
convertmodule = po2flatxml
expected_options = [
diff --git a/tests/translate/convert/test_po2html.py b/tests/translate/convert/test_po2html.py
index fccee8dea2..b8ea824f78 100644
--- a/tests/translate/convert/test_po2html.py
+++ b/tests/translate/convert/test_po2html.py
@@ -11,7 +11,7 @@
class TestPO2Html:
@staticmethod
def converthtml(posource, htmltemplate, includefuzzy=False):
- """helper to exercise the command line function."""
+ """Helper to exercise the command line function."""
inputfile = BytesIO(posource.encode())
print(inputfile.getvalue())
outputfile = BytesIO()
@@ -21,14 +21,15 @@ def converthtml(posource, htmltemplate, includefuzzy=False):
return outputfile.getvalue().decode("utf-8")
def test_simple(self):
- """simple po to html test."""
+ """Simple po to html test."""
htmlsource = "
This app requires permission to access all
+files on the storage.
+
+
+ ]]>"""
+ content = f"""
+
+ {body}
+"""
+ store = self.StoreClass()
+ store.parse(content.encode())
+ assert len(store.units) == 1
+ assert store.units[0].target == body
+ assert bytes(store).decode() == content
+ store.units[0].target = body
+ assert bytes(store).decode() == content
+
+ def test_prefix(self):
+ body = "< body"
+ content = f"""
+
+ {body}
+"""
+ store = self.StoreClass()
+ store.parse(content.encode())
+ assert len(store.units) == 1
+ assert store.units[0].target == body
+ assert bytes(store).decode() == content
+
+ def test_rtl(self):
+ content = """
+
+ {app_name} צרכה רשות לגשת ליומן שלך, על מנת ליצור תוכניות עבור עסקאות עתידיות חוזרות.
+"""
+ store = self.StoreClass()
+ store.parse(content.encode())
+ assert len(store.units) == 1
+ assert (
+ store.units[0].target
+ == "{app_name} צרכה רשות לגשת ליומן שלך, על מנת ליצור תוכניות עבור עסקאות עתידיות חוזרות."
+ )
+
+
+class TestMOKOResourceUnit(test_monolingual.TestMonolingualUnit):
+ UnitClass = aresource.MOKOResourceUnit
+
+
+class TestMOKOResourceFile(test_monolingual.TestMonolingualStore):
+ StoreClass = aresource.MOKOResourceFile
+
+ def test_plural(self):
+ content = """
+
+
+ %d visitor
+ %d visitors
+
+"""
+ store = self.StoreClass()
+ store.parse(content.encode())
+ assert store.units[0].target == multistring(["%d visitor", "%d visitors"])
+ store = self.StoreClass()
+ store.targetlanguage = "zh-rHK"
+ store.parse(content.encode())
+ store.units[0].target = "%d 訪客"
+ assert (
+ bytes(store).decode()
+ == """
+
+
+ %d 訪客
+ """
)
diff --git a/tests/translate/storage/test_base.py b/tests/translate/storage/test_base.py
index b42e148c26..fafee6e05a 100644
--- a/tests/translate/storage/test_base.py
+++ b/tests/translate/storage/test_base.py
@@ -16,8 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see .
-"""tests for storage base classes"""
-
+"""Tests for storage base classes."""
import os
from io import BytesIO
@@ -29,22 +28,21 @@
def headerless_len(units):
- """return count of translatable (non header) units"""
+ """Return count of translatable (non header) units."""
return len(list(filter(lambda x: not x.isheader(), units)))
def first_translatable(store):
- """returns first translatable unit, skipping header if present"""
+ """Returns first translatable unit, skipping header if present."""
if store.units[0].isheader() and len(store.units) > 1:
return store.units[1]
- else:
- return store.units[0]
+ return store.units[0]
class TestTranslationUnit:
"""
Tests a TranslationUnit.
- Derived classes can reuse these tests by pointing UnitClass to a derived Unit
+ Derived classes can reuse these tests by pointing UnitClass to a derived Unit.
"""
UnitClass = base.TranslationUnit
@@ -53,27 +51,28 @@ def setup_method(self, method):
self.unit = self.UnitClass("Test String")
def test_isfuzzy(self):
- """Test that we can call isfuzzy() on a unit.
+ """
+ Test that we can call isfuzzy() on a unit.
The default return value for isfuzzy() should be False.
"""
assert not self.unit.isfuzzy()
def test_create(self):
- """tests a simple creation with a source string"""
+ """Tests a simple creation with a source string."""
unit = self.unit
print("unit.source:", unit.source)
assert unit.source == "Test String"
def test_eq(self):
- """tests equality comparison"""
+ """Tests equality comparison."""
unit1 = self.unit
unit2 = self.UnitClass("Test String")
unit3 = self.UnitClass("Test String")
unit4 = self.UnitClass("Blessed String")
unit5 = self.UnitClass("Blessed String")
unit6 = self.UnitClass("Blessed String")
- assert unit1 == unit1
+ assert unit1 == unit1 # noqa: PLR0124
assert unit1 == unit2
assert unit1 != unit4
unit1.target = "Stressed Ting"
@@ -92,10 +91,10 @@ def test_eq(self):
# - iniunit, phpunit and propunit (properties) can have different source/target
# and are reunited when serializing through `self.translation or self.value`
assert unit1 == unit6
- assert not (unit1 != unit6)
+ assert unit1 == unit6
else:
assert unit1 != unit6
- assert not (unit1 == unit6)
+ assert unit1 != unit6
def test_target(self):
unit = self.unit
@@ -145,7 +144,6 @@ def test_difficult_escapes(self):
Test difficult characters that might go wrong in a quoting and
escaping roundtrip.
"""
-
unit = self.unit
specials = [
"\\n",
@@ -239,23 +237,23 @@ class TestTranslationStore:
StoreClass = base.TranslationStore
def setup_method(self, method):
- """Allocates a unique self.filename for the method, making sure it doesn't exist"""
+ """Allocates a unique self.filename for the method, making sure it doesn't exist."""
self.filename = f"{self.__class__.__name__}_{method.__name__}.test"
if os.path.exists(self.filename):
os.remove(self.filename)
def teardown_method(self, method):
- """Makes sure that if self.filename was created by the method, it is cleaned up"""
+ """Makes sure that if self.filename was created by the method, it is cleaned up."""
if os.path.exists(self.filename):
os.remove(self.filename)
def test_create_blank(self):
- """Tests creating a new blank store"""
+ """Tests creating a new blank store."""
store = self.StoreClass()
assert headerless_len(store.units) == 0
def test_add(self):
- """Tests adding a new unit with a source string"""
+ """Tests adding a new unit with a source string."""
store = self.StoreClass()
unit = store.addsourceunit("Test String")
print(str(unit))
@@ -264,7 +262,7 @@ def test_add(self):
assert unit.source == "Test String"
def test_remove(self):
- """Tests removing a unit with a source string"""
+ """Tests removing a unit with a source string."""
store = self.StoreClass()
unit = store.addsourceunit("Test String")
# Some storages (MO, OmegaT) serialize only translated units
@@ -279,7 +277,7 @@ def test_remove(self):
assert withoutunit != withunit
def test_find(self):
- """Tests searching for a given source string"""
+ """Tests searching for a given source string."""
store = self.StoreClass()
unit1 = store.addsourceunit("Test String")
unit2 = store.addsourceunit("Blessed String")
@@ -298,13 +296,13 @@ def test_translate(self):
assert store.translate("Beziér curve") == "Beziér-kurwe"
def reparse(self, store):
- """converts the store to a string and back to a store again"""
+ """Converts the store to a string and back to a store again."""
storestring = bytes(store)
return self.StoreClass.parsestring(storestring)
@staticmethod
def check_equality(store1, store2):
- """asserts that store1 and store2 are the same"""
+ """Asserts that store1 and store2 are the same."""
assert headerless_len(store1.units) == headerless_len(store2.units)
for n, store1unit in enumerate(store1.units):
store2unit = store2.units[n]
@@ -321,7 +319,7 @@ def check_equality(store1, store2):
assert store1unit == store2unit
def test_parse(self):
- """Tests converting to a string and parsing the resulting string"""
+ """Tests converting to a string and parsing the resulting string."""
store = self.StoreClass()
unit1 = store.addsourceunit("Test String")
unit1.target = "Test String"
@@ -331,7 +329,7 @@ def test_parse(self):
self.check_equality(store, newstore)
def test_files(self):
- """Tests saving to and loading from files"""
+ """Tests saving to and loading from files."""
store = self.StoreClass()
unit1 = store.addsourceunit("Test String")
unit1.target = "Test String"
diff --git a/tests/translate/storage/test_catkeys.py b/tests/translate/storage/test_catkeys.py
index a2f3652351..0e67ccf81e 100644
--- a/tests/translate/storage/test_catkeys.py
+++ b/tests/translate/storage/test_catkeys.py
@@ -23,7 +23,7 @@ class test but eliminate a few of the actual tests.
assert unit.source == special
def test_newlines(self):
- """Wordfast does not like real newlines"""
+ """Wordfast does not like real newlines."""
unit = self.UnitClass("One\nTwo")
assert unit.dict["source"] == "One\\nTwo"
@@ -37,7 +37,6 @@ def test_istranslated(self):
def test_note_sanity(self):
"""Override test, since the format doesn't support notes."""
- pass
class TestCatkeysFile(test_base.TestTranslationStore):
diff --git a/tests/translate/storage/test_cpo.py b/tests/translate/storage/test_cpo.py
index 04b4ce534c..446d0cc30d 100644
--- a/tests/translate/storage/test_cpo.py
+++ b/tests/translate/storage/test_cpo.py
@@ -43,7 +43,7 @@ def test_plurals(self):
assert unit.target == "Sk\u00ear"
def test_plural_reduction(self):
- """checks that reducing the number of plurals supplied works"""
+ """Checks that reducing the number of plurals supplied works."""
unit = self.UnitClass("Tree")
unit.msgid_plural = ["Trees"]
assert isinstance(unit.source, multistring)
@@ -61,7 +61,7 @@ def test_plural_reduction(self):
assert unit.target.strings == ["Een Boom"]
def test_notes(self):
- """tests that the generic notes API works"""
+ """Tests that the generic notes API works."""
unit = self.UnitClass("File")
assert unit.getnotes() == ""
unit.addnote("Which meaning of file?")
@@ -76,7 +76,7 @@ def test_notes(self):
unit.getnotes("devteam")
def test_notes_withcomments(self):
- """tests that when we add notes that look like comments that we treat them properly"""
+ """Tests that when we add notes that look like comments that we treat them properly."""
unit = self.UnitClass("File")
unit.addnote("# Double commented comment")
assert unit.getnotes() == "# Double commented comment"
@@ -87,7 +87,7 @@ class TestCPOFile(test_po.TestPOFile):
StoreClass = cpo.pofile
def test_msgidcomments(self):
- """checks that we handle msgid comments"""
+ """Checks that we handle msgid comments."""
posource = 'msgid "test me"\nmsgstr ""'
pofile = self.poparse(posource)
thepo = pofile.units[0]
@@ -100,7 +100,7 @@ def test_msgidcomments(self):
@mark.xfail(reason="Were disabled during port of Pypo to cPO - they might work")
def test_merge_duplicates_msgctxt(self):
- """checks that merging duplicates works for msgctxt"""
+ """Checks that merging duplicates works for msgctxt."""
posource = '#: source1\nmsgid "test me"\nmsgstr ""\n\n#: source2\nmsgid "test me"\nmsgstr ""\n'
pofile = self.poparse(posource)
assert len(pofile.units) == 2
@@ -112,7 +112,7 @@ def test_merge_duplicates_msgctxt(self):
@mark.xfail(reason="Were disabled during port of Pypo to cPO - they might work")
def test_merge_blanks(self):
- """checks that merging adds msgid_comments to blanks"""
+ """Checks that merging adds msgid_comments to blanks."""
posource = (
'#: source1\nmsgid ""\nmsgstr ""\n\n#: source2\nmsgid ""\nmsgstr ""\n'
)
@@ -127,7 +127,7 @@ def test_merge_blanks(self):
@mark.xfail(reason="Were disabled during port of Pypo to cPO - they might work")
def test_msgid_comment(self):
- """checks that when adding msgid_comments we place them on a newline"""
+ """Checks that when adding msgid_comments we place them on a newline."""
posource = '#: source0\nmsgid "Same"\nmsgstr ""\n\n#: source1\nmsgid "Same"\nmsgstr ""\n'
pofile = self.poparse(posource)
assert len(pofile.units) == 2
@@ -145,7 +145,7 @@ def test_msgid_comment(self):
@mark.xfail(reason="Were disabled during port of Pypo to cPO - they might work")
def test_keep_blanks(self):
- """checks that keeping keeps blanks and doesn't add msgid_comments"""
+ """Checks that keeping keeps blanks and doesn't add msgid_comments."""
posource = (
'#: source1\nmsgid ""\nmsgstr ""\n\n#: source2\nmsgid ""\nmsgstr ""\n'
)
@@ -158,7 +158,7 @@ def test_keep_blanks(self):
assert cpo.unquotefrompo(pofile.units[1].msgidcomments) == ""
def test_output_str_unicode(self):
- """checks that we can serialize pofile, unit content is in unicode"""
+ """Checks that we can serialize pofile, unit content is in unicode."""
posource = """#: nb\nmsgid "Norwegian Bokmål"\nmsgstr ""\n"""
pofile = self.StoreClass(BytesIO(posource.encode("UTF-8")), encoding="UTF-8")
assert len(pofile.units) == 1
@@ -178,7 +178,7 @@ def test_output_str_unicode(self):
# assert halfstr.encode("UTF-8") in bytes(pofile)
def test_posections(self):
- """checks the content of all the expected sections of a PO message"""
+ """Checks the content of all the expected sections of a PO message."""
posource = '# other comment\n#. automatic comment\n#: source comment\n#, fuzzy\nmsgid "One"\nmsgstr "Een"\n'
pofile = self.poparse(posource)
print(pofile)
@@ -186,7 +186,7 @@ def test_posections(self):
assert bytes(pofile).decode("utf-8") == posource
def test_multiline_obsolete(self):
- """Tests for correct output of mulitline obsolete messages"""
+ """Tests for correct output of multiline obsolete messages."""
posource = '#~ msgid ""\n#~ "Old thing\\n"\n#~ "Second old thing"\n#~ msgstr ""\n#~ "Ou ding\\n"\n#~ "Tweede ou ding"\n'
pofile = self.poparse(posource)
print("Source:\n%s" % posource)
@@ -197,7 +197,7 @@ def test_multiline_obsolete(self):
assert bytes(pofile).decode("utf-8") == posource
def test_unassociated_comments(self):
- """tests behaviour of unassociated comments."""
+ """Tests behaviour of unassociated comments."""
oldsource = '# old lonesome comment\n\nmsgid "one"\nmsgstr "een"\n'
oldfile = self.poparse(oldsource)
print("serialize", bytes(oldfile))
diff --git a/tests/translate/storage/test_csvl10n.py b/tests/translate/storage/test_csvl10n.py
index 83f5ca0eb8..3f07a7a8fa 100644
--- a/tests/translate/storage/test_csvl10n.py
+++ b/tests/translate/storage/test_csvl10n.py
@@ -19,7 +19,7 @@ def parse_store(self, source, **kwargs):
return self.StoreClass(BytesIO(source), **kwargs)
def test_singlequoting(self):
- """Tests round trip on single quoting at start of string"""
+ """Tests round trip on single quoting at start of string."""
store = self.StoreClass()
unit1 = store.addsourceunit("Test 'String'")
assert unit1.source == "Test 'String'"
@@ -136,3 +136,8 @@ def test_encoding(self):
assert store.units[0].target == "zkouška sirén"
with pytest.raises(UnicodeDecodeError):
store = self.parse_store(content.encode("iso-8859-2"), encoding="utf-8")
+
+ def test_corrupt(self):
+ store = self.StoreClass()
+ with pytest.raises(ValueError):
+ store.parse(b"PK\x03\x04\x14\x00\x06\x00\x08\x00\x00\x00!\x00b\xee\x9d")
diff --git a/tests/translate/storage/test_directory.py b/tests/translate/storage/test_directory.py
index a0f10cdcd2..a6db5ae4af 100644
--- a/tests/translate/storage/test_directory.py
+++ b/tests/translate/storage/test_directory.py
@@ -1,4 +1,4 @@
-"""Tests for the directory module"""
+"""Tests for the directory module."""
import os
@@ -6,22 +6,22 @@
class TestDirectory:
- """a test class to run tests on a test Pootle Server"""
+ """A test class to run tests on a test Pootle Server."""
def setup_method(self, method):
- """sets up a test directory"""
+ """Sets up a test directory."""
print("setup_method called on", self.__class__.__name__)
self.testdir = "%s_testdir" % (self.__class__.__name__)
self.cleardir(self.testdir)
os.mkdir(self.testdir)
def teardown_method(self, method):
- """removes the attributes set up by setup_method"""
+ """Removes the attributes set up by setup_method."""
self.cleardir(self.testdir)
@staticmethod
def cleardir(dirname):
- """removes the given directory"""
+ """Removes the given directory."""
if os.path.exists(dirname):
for dirpath, subdirs, filenames in os.walk(dirname, topdown=False):
for name in filenames:
@@ -44,7 +44,7 @@ def mkdir(self, dir):
os.mkdir(os.path.join(self.testdir, dir))
def test_created(self):
- """test that the directory actually exists"""
+ """Test that the directory actually exists."""
print(self.testdir)
assert os.path.isdir(self.testdir)
diff --git a/tests/translate/storage/test_dtd.py b/tests/translate/storage/test_dtd.py
index c1c1c20d4b..30047fe87d 100644
--- a/tests/translate/storage/test_dtd.py
+++ b/tests/translate/storage/test_dtd.py
@@ -61,8 +61,7 @@ def test_roundtrip_quoting():
quoted_special = dtd.quotefordtd(special)
unquoted_special = dtd.unquotefromdtd(quoted_special)
print(
- "special: %r\nquoted: %r\nunquoted: %r\n"
- % (special, quoted_special, unquoted_special)
+ f"special: {special!r}\nquoted: {quoted_special!r}\nunquoted: {unquoted_special!r}\n"
)
assert special == unquoted_special
@@ -71,12 +70,12 @@ def test_roundtrip_quoting():
def test_quotefordtd_unimplemented_cases():
"""Test unimplemented quoting DTD cases."""
assert dtd.quotefordtd("Between
and
") == (
- '"Between <p> and' ' </p>"'
+ '"Between <p> and </p>"'
)
def test_quotefordtd():
- """Test quoting DTD definitions"""
+ """Test quoting DTD definitions."""
assert dtd.quotefordtd("") == '""'
assert dtd.quotefordtd("") == '""'
assert dtd.quotefordtd("Completed %S") == '"Completed %S"'
@@ -105,7 +104,7 @@ def test_unquotefromdtd_unimplemented_cases():
def test_unquotefromdtd():
- """Test unquoting DTD definitions"""
+ """Test unquoting DTD definitions."""
# %
assert dtd.unquotefromdtd('"Completed %S"') == "Completed %S"
assert dtd.unquotefromdtd('"Completed %S"') == "Completed %S"
@@ -138,8 +137,7 @@ def test_android_roundtrip_quoting():
quoted_special = dtd.quoteforandroid(special)
unquoted_special = dtd.unquotefromandroid(quoted_special)
print(
- "special: %r\nquoted: %r\nunquoted: %r\n"
- % (special, quoted_special, unquoted_special)
+ f"special: {special!r}\nquoted: {quoted_special!r}\nunquoted: {unquoted_special!r}\n"
)
assert special == unquoted_special
@@ -159,7 +157,7 @@ def test_unquotefromandroid():
def test_removeinvalidamp(recwarn):
- """tests the the removeinvalidamps function"""
+ """Tests the removeinvalidamps function."""
def tester(actual, expected=None):
if expected is None:
@@ -193,18 +191,18 @@ class TestDTD(test_monolingual.TestMonolingualStore):
@staticmethod
def dtdparse(dtdsource):
- """helper that parses dtd source without requiring files"""
+ """Helper that parses dtd source without requiring files."""
if not isinstance(dtdsource, bytes):
dtdsource = dtdsource.encode("utf-8")
dummyfile = BytesIO(dtdsource)
return dtd.dtdfile(dummyfile)
def dtdregen(self, dtdsource):
- """helper that converts dtd source to dtdfile object and back"""
+ """Helper that converts dtd source to dtdfile object and back."""
return bytes(self.dtdparse(dtdsource)).decode("utf-8")
def test_simpleentity(self):
- """checks that a simple dtd entity definition is parsed correctly"""
+ """Checks that a simple dtd entity definition is parsed correctly."""
dtdsource = '\n'
dtdfile = self.dtdparse(dtdsource)
assert len(dtdfile.units) == 1
@@ -213,13 +211,13 @@ def test_simpleentity(self):
assert dtdunit.definition == '"bananas for sale"'
def test_blanklines(self):
- """checks that blank lines don't break the parsing or regeneration"""
+ """Checks that blank lines don't break the parsing or regeneration."""
dtdsource = '\n\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_simpleentity_source(self):
- """checks that a simple dtd entity definition can be regenerated as source"""
+ """Checks that a simple dtd entity definition can be regenerated as source."""
dtdsource = '\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
@@ -229,19 +227,19 @@ def test_simpleentity_source(self):
assert dtdsource == dtdregen
def test_hashcomment_source(self):
- """checks that a #expand comment is retained in the source"""
+ """Checks that a #expand comment is retained in the source."""
dtdsource = '#expand \n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_commentclosing(self):
- """tests that comment closes with trailing space aren't duplicated"""
+ """Tests that comment closes with trailing space aren't duplicated."""
dtdsource = ' \n\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_commententity(self):
- """check that we don't process messages in : bug 102"""
+ """Check that we don't process messages in : bug 102."""
dtdsource = """"""
dtdfile = self.dtdparse(dtdsource)
@@ -251,7 +249,7 @@ def test_commententity(self):
assert dtdunit.isblank()
def test_newlines_in_entity(self):
- """tests that we can handle newlines in the entity itself"""
+ """Tests that we can handle newlines in the entity itself."""
dtdsource = """
Check the file name for capitalisation or other typing errors.
@@ -294,7 +292,7 @@ def test_newlines_in_entity(self):
assert dtdsource == dtdregen
def test_conflate_comments(self):
- """Tests that comments don't run onto the same line"""
+ """Tests that comments don't run onto the same line."""
dtdsource = '\n\n\n'
dtdregen = self.dtdregen(dtdsource)
print(dtdsource)
@@ -302,7 +300,7 @@ def test_conflate_comments(self):
assert dtdsource == dtdregen
def test_localisation_notes(self):
- """test to ensure that we retain the localisation note correctly"""
+ """Test to ensure that we retain the localisation note correctly."""
dtdsource = """
"""
@@ -310,14 +308,14 @@ def test_localisation_notes(self):
assert dtdsource == dtdregen
def test_entitityreference_in_source(self):
- """checks that an &entity; in the source is retained"""
+ """Checks that an &entity; in the source is retained."""
dtdsource = '\n%realBrandDTD;\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
# test for bug #610
def test_entitityreference_order_in_source(self):
- """checks that an &entity; in the source is retained"""
+ """Checks that an &entity; in the source is retained."""
dtdsource = '\n%realBrandDTD;\n\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
@@ -340,20 +338,20 @@ def test_entitityreference_order_in_source(self):
@mark.xfail(reason="Not Implemented")
def test_comment_following(self):
- """check that comments that appear after and entity are not pushed onto another line"""
+ """Check that comments that appear after and entity are not pushed onto another line."""
dtdsource = ' '
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_comment_newline_space_closing(self):
- """check that comments that are closed by a newline then space then --> don't break the following entries"""
+ """Check that comments that are closed by a newline then space then --> don't break the following entries."""
dtdsource = '\n\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
@mark.xfail(reason="Not Implemented")
def test_invalid_quoting(self):
- """checks that invalid quoting doesn't work - quotes can't be reopened"""
+ """Checks that invalid quoting doesn't work - quotes can't be reopened."""
# TODO: we should rather raise an error
dtdsource = '\n'
assert (
@@ -366,7 +364,7 @@ def test_invalid_quoting(self):
assert bytes(dtdfile) == b'\n'
def test_missing_quotes(self, recwarn):
- """test that we fail gracefully when a message without quotes is found (bug #161)"""
+ """Test that we fail gracefully when a message without quotes is found (bug #161)."""
dtdsource = '\n\n'
dtdfile = self.dtdparse(dtdsource)
assert len(dtdfile.units) == 1
@@ -374,7 +372,7 @@ def test_missing_quotes(self, recwarn):
# Test for bug #68
def test_entity_escaping(self):
- """Test entities escaping (& " < > ') (bug #68)"""
+ """Test entities escaping (& " < > ') (bug #68)."""
dtdsource = (
'\n\n' % encoding
- stringfile = BytesIO((xmldecl + string).encode())
- return stringfile
+ return BytesIO((xmldecl + string).encode())
def test_root_config_detect(self):
- """Test that parser fails on inconsistent root name configuration"""
+ """Test that parser fails on inconsistent root name configuration."""
xmlsource = 'Test'
with raises(AssertionError):
self.StoreClass(xmlsource, root_name="different")
def test_value_config_detect(self):
- """Test that parser fails on inconsistent value name configuration"""
+ """Test that parser fails on inconsistent value name configuration."""
xmlsource = 'Test'
with raises(AssertionError):
self.StoreClass(xmlsource, value_name="different")
def test_key_config_detect(self):
- """Test that parser fails on inconsistent key name configuration"""
+ """Test that parser fails on inconsistent key name configuration."""
xmlsource = 'Test'
with raises(AssertionError):
self.StoreClass(xmlsource, key_name="different")
def test_value_config_mixed_ok(self):
- """Test that parser leaves non-value entries alone"""
+ """Test that parser leaves non-value entries alone."""
xmlsource = """Testthis entry does not matter
@@ -60,7 +59,8 @@ def test_value_config_mixed_ok(self):
assert len(list(store.root.iterchildren("not-a-value"))) == 1
def test_namespace_config_detect(self):
- """Test that parser fails on inconsistent root namespace configuration.
+ """
+ Test that parser fails on inconsistent root namespace configuration.
This test triggers at root level, and yields a similar message as
the test against the root element name.
diff --git a/tests/translate/storage/test_fluent.py b/tests/translate/storage/test_fluent.py
index 913b804773..0a92532741 100644
--- a/tests/translate/storage/test_fluent.py
+++ b/tests/translate/storage/test_fluent.py
@@ -41,8 +41,7 @@ class TestFluentFile(test_monolingual.TestMonolingualStore):
def fluent_parse(fluent_source: str) -> fluent.FluentFile:
"""Helper that parses Fluent source without requiring files."""
dummyfile = BytesIO(fluent_source.encode())
- fluent_file = fluent.FluentFile(dummyfile)
- return fluent_file
+ return fluent.FluentFile(dummyfile)
@staticmethod
def fluent_serialize(fluent_file: fluent.FluentFile) -> str:
@@ -55,7 +54,8 @@ def fluent_regen(self, fluent_source: str) -> str:
@staticmethod
def quick_fluent_file(unit_specs: list[dict[str, str]]) -> fluent.FluentFile:
- """Helper to create a FluentFile populated by the FluentUnits
+ """
+ Helper to create a FluentFile populated by the FluentUnits
parametrised in `unit_specs`.
"""
fluent_file = fluent.FluentFile()
@@ -90,7 +90,8 @@ def assert_selector_branch(
found_selector_branches: list[dict[str, Any]],
found_selector_nodes: list[dict[str, Any]],
) -> None:
- """Assert that the given selector branch matches the expected entry in
+ """
+ Assert that the given selector branch matches the expected entry in
found_selector_branches at the given expect_index.
"""
assert expect_index >= 0
@@ -142,7 +143,8 @@ def assert_selector_node(
found_selector_branches: list[dict[str, Any]],
found_selector_nodes: list[dict[str, Any]],
) -> None:
- """Assert that the given selector node matches the expected entry in
+ """
+ Assert that the given selector node matches the expected entry in
found_selector_nodes at the given expect_index.
"""
assert selector_node is not None
@@ -188,7 +190,8 @@ def assert_parts(
fluent_unit: fluent.FluentUnit,
expect_parts: list[dict[str, Any]] | None,
) -> None:
- """Assert that the given fluent unit has the expected parts.
+ """
+ Assert that the given fluent unit has the expected parts.
Each part should be a dictionary defining its "name", "selector-nodes",
"selector-branches" and "pattern-variants".
@@ -333,7 +336,8 @@ def assert_parts(
def assert_units(
cls, fluent_file: fluent.FluentFile, expect_units: list[dict[str, Any]]
) -> None:
- """Assert that the given FluentFile has the expected FluentUnits.
+ """
+ Assert that the given FluentFile has the expected FluentUnits.
:param FluentFile fluent_file: The file to test.
:param list[dict] expect_units: A list of the expected units, specified
@@ -354,10 +358,7 @@ def assert_units(
assert unit_id == unit.getid()
id_type = None
if unit_id:
- if unit_id.startswith("-"):
- id_type = "Term"
- else:
- id_type = "Message"
+ id_type = "Term" if unit_id.startswith("-") else "Message"
assert expect.get("type", id_type) == unit.fluent_type
if unit.fluent_type.endswith("Comment"):
assert unit.isheader()
@@ -382,7 +383,8 @@ def basic_test(
expect_units: list[dict[str, Any]],
expect_serialize: str | None = None,
) -> None:
- """Assert that the given fluent source parses correctly to the expected
+ """
+ Assert that the given fluent source parses correctly to the expected
FluentFile, and reserializes correctly.
:param str fluent_source: The fluent source. Any common indent in this
@@ -412,7 +414,8 @@ def basic_test(
def assert_serialize(
self, fluent_file: fluent.FluentFile, expect_serialize: str
) -> None:
- """Assert that the given FluentFile serializes to the given string.
+ """
+ Assert that the given FluentFile serializes to the given string.
:param FluentFile fluent_file: The FluentFile to serialize.
:param str expect_serialize: The expected result. Any common indent in
@@ -422,7 +425,8 @@ def assert_serialize(
assert self.fluent_serialize(fluent_file) == expect_serialize
def assert_parse_failure(self, fluent_source: str, error_part: str) -> None:
- """Assert that the given fluent source fails to parse into a
+ """
+ Assert that the given fluent source fails to parse into a
FluentFile.
:param str fluent_source: The fluent source. Any common indent will be
@@ -443,14 +447,15 @@ def assert_serialize_failure(
error_unit: fluent.FluentUnit,
error_msg: str = r".+",
) -> None:
- """Assert that the given FluentFile fails to serialize.
+ """
+ Assert that the given FluentFile fails to serialize.
:param FluentFile fluent_file: The FluentFile to try and serialize.
:param FluentUnit error_unit: The FluentUnit that is expected to fail.
:param str error_msg: The expected syntax error for the unit.
"""
with raises(
- ValueError,
+ ValueError,
match=f'^Error in source of FluentUnit "{error_unit.getid()}":\\n',
):
self.fluent_serialize(fluent_file)
@@ -781,7 +786,7 @@ def subtest_whitespace(whitespace):
# line, as per fluent's rules.
fluent_file.units[0].source = "line 1 \n line 2 \nline 3 \n "
self.assert_serialize(
- fluent_file, "m =\n" " line 1 \n" " line 2 \n" " line 3\n"
+ fluent_file, "m =\n line 1 \n line 2 \n line 3\n"
)
def test_empty_unit_source(self):
@@ -990,15 +995,15 @@ def test_multiline_value(self):
# Trailing whitespace is preserved in fluent for all but the last line.
self.basic_test(
- "message = \n" " trailing \n" " whitespace \n" " last line \n",
+ "message = \n trailing \n whitespace \n last line \n",
[{"id": "message", "source": "trailing \nwhitespace \nlast line"}],
- "message =\n" " trailing \n" " whitespace \n" " last line\n",
+ "message =\n trailing \n whitespace \n last line\n",
)
# Starting on the same line, and with gap.
self.basic_test(
- "message = trailing \n" " whitespace \n" " \n" " last line \n",
+ "message = trailing \n whitespace \n \n last line \n",
[{"id": "message", "source": "trailing \nwhitespace \n\nlast line"}],
- "message =\n" " trailing \n" " whitespace \n" "\n" " last line\n",
+ "message =\n trailing \n whitespace \n\n last line\n",
)
def test_multiline_message_attributes(self):
@@ -1115,7 +1120,7 @@ def test_multiline_message_attributes(self):
[
{
"id": "message",
- "source": "Message\n" ".a =\n" "My multiline\n" "attribute",
+ "source": "Message\n.a =\nMy multiline\nattribute",
},
],
)
@@ -1133,7 +1138,7 @@ def test_multiline_message_attributes(self):
[
{
"id": "message",
- "source": "Message\n" ".a =\n" "My multiline\n" "\n" "gap",
+ "source": "Message\n.a =\nMy multiline\n\ngap",
},
],
"""\
@@ -1399,15 +1404,15 @@ def test_multiline_term_attributes(self):
# Trailing whitespace is preserved in fluent for all but the last line.
self.basic_test(
- "-term = \n" " trailing \n" " whitespace \n" " last line \n",
+ "-term = \n trailing \n whitespace \n last line \n",
[{"id": "-term", "source": "trailing \nwhitespace \nlast line"}],
- "-term =\n" " trailing \n" " whitespace \n" " last line\n",
+ "-term =\n trailing \n whitespace \n last line\n",
)
# Starting on the same line, and with gap.
self.basic_test(
- "-term = trailing \n" " whitespace \n" " \n" " last line \n",
+ "-term = trailing \n whitespace \n \n last line \n",
[{"id": "-term", "source": "trailing \nwhitespace \n\nlast line"}],
- "-term =\n" " trailing \n" " whitespace \n" "\n" " last line\n",
+ "-term =\n trailing \n whitespace \n\n last line\n",
)
def test_special_syntax_characters(self):
@@ -1450,10 +1455,7 @@ def subtest_special_syntax_characters(char, ok_at_start):
# Test with just the character on its own, or at the start of a
# value.
- if ok_at_start:
- escaped_char = char
- else:
- escaped_char = f'{{ "{char}" }}'
+ escaped_char = char if ok_at_start else f'{{ "{char}" }}'
for value in [char, f"{char}at start"]:
escaped_value = value.replace(char, escaped_char)
@@ -2009,8 +2011,10 @@ def test_detached_comment(self):
)
def test_resource_and_group_comment_prefixes(self):
- """Test that ResourceComment and GroupComment prefixes on Messages and
- Terms."""
+ """
+ Test that ResourceComment and GroupComment prefixes on Messages and
+ Terms.
+ """
# With both ResourceComments and GroupComments, we gain both on
# Messages or Terms, and they appear before their Comments.
self.basic_test(
@@ -2162,8 +2166,10 @@ def test_resource_and_group_comment_prefixes(self):
)
def test_reference(self):
- """Test fluent MessageReferences, TermReferences and
- VariableReferences."""
+ """
+ Test fluent MessageReferences, TermReferences and
+ VariableReferences.
+ """
# Test reference to a term or message.
self.basic_test(
"""\
@@ -2459,7 +2465,7 @@ def test_literals(self):
},
{
"id": "message",
- "source": '{ " " } space literal\n' ".attr = number { 79 }",
+ "source": """{ " " } space literal\n.attr = number { 79 }""",
},
],
)
@@ -3121,7 +3127,7 @@ def test_selectors(self):
[
{
"id": "m",
- "source": "{ $var ->\n" " *[other] none\n" "} { $var }",
+ "source": "{ $var ->\n *[other] none\n} { $var }",
"refs": ["$var"],
"parts": [
{
@@ -3475,7 +3481,8 @@ def test_unit_ids():
assert unit.getid() == ok_id
def test_duplicate_ids(self):
- """Test that we get a parsing error if an id is duplicated in the
+ """
+ Test that we get a parsing error if an id is duplicated in the
source.
"""
with raises(
diff --git a/tests/translate/storage/test_html.py b/tests/translate/storage/test_html.py
index 735fa3d911..65ce2aa296 100644
--- a/tests/translate/storage/test_html.py
+++ b/tests/translate/storage/test_html.py
@@ -16,7 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see .
-"""Tests for the HTML classes"""
+"""Tests for the HTML classes."""
from pytest import raises
@@ -24,7 +24,7 @@
def test_guess_encoding():
- """Read an encoding header to guess the encoding correctly"""
+ """Read an encoding header to guess the encoding correctly."""
h = html.htmlfile()
assert (
h.guess_encoding(
@@ -67,7 +67,7 @@ def test_self_closing_tags():
def test_escaping_script_and_pre():
"""