diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 7206336d..6667a059 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -17,7 +17,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
- python-version: '3.x'
+ python-version: 3.9
- name: Install dependencies
run: |
python -m pip install --upgrade pip
@@ -32,4 +32,4 @@ jobs:
run: |
RELEASE=${{ steps.get_version.outputs.VERSION }} python setup.py sdist bdist_wheel
twine upload dist/*
- READTHEDOCS_TOKEN="${{ secrets.READTHEDOCS_TOKEN }}" bin/publish-docs ${{ steps.get_version.outputs.VERSION }}
+ # READTHEDOCS_TOKEN="${{ secrets.READTHEDOCS_TOKEN }}" bin/publish-docs ${{ steps.get_version.outputs.VERSION }}
diff --git a/.github/workflows/test-neo34-py27.yml b/.github/workflows/test-neo34-py27.yml
index af966c7e..d8f7939a 100644
--- a/.github/workflows/test-neo34-py27.yml
+++ b/.github/workflows/test-neo34-py27.yml
@@ -9,7 +9,7 @@ on:
jobs:
test:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["2.7"]
@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python
- uses: actions/setup-python@v2
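+ # Upstream actions/setup-python no longer provides Python 2.7 on newer runner images; this fork can build it from source.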
+ uses: MatteoH2O1999/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- uses: actions/setup-java@v1
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '3.4'
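+ # Use a newer Python here so that a recent coverage release (Python 3 only) can generate the lcov report.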
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo34-py35+.yml b/.github/workflows/test-neo34-py35+.yml
index fc6fe017..3489c51e 100644
--- a/.github/workflows/test-neo34-py35+.yml
+++ b/.github/workflows/test-neo34-py35+.yml
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '3.4'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo35-py27.yml b/.github/workflows/test-neo35-py27.yml
index 71d27227..c04c8d08 100644
--- a/.github/workflows/test-neo35-py27.yml
+++ b/.github/workflows/test-neo35-py27.yml
@@ -9,7 +9,7 @@ on:
jobs:
test:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["2.7"]
@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: MatteoH2O1999/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- uses: actions/setup-java@v1
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '3.5'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo35-py35+.yml b/.github/workflows/test-neo35-py35+.yml
index 6620a295..0005c0b5 100644
--- a/.github/workflows/test-neo35-py35+.yml
+++ b/.github/workflows/test-neo35-py35+.yml
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '3.5'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo40-py27.yml b/.github/workflows/test-neo40-py27.yml
index f44e3339..f836181c 100644
--- a/.github/workflows/test-neo40-py27.yml
+++ b/.github/workflows/test-neo40-py27.yml
@@ -9,7 +9,7 @@ on:
jobs:
test:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["2.7"]
@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: MatteoH2O1999/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- uses: actions/setup-java@v1
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.0'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo40-py35+.yml b/.github/workflows/test-neo40-py35+.yml
index 1775a8b0..92fd8522 100644
--- a/.github/workflows/test-neo40-py35+.yml
+++ b/.github/workflows/test-neo40-py35+.yml
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.0'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo41-py27.yml b/.github/workflows/test-neo41-py27.yml
index 5af7d14f..f574db84 100644
--- a/.github/workflows/test-neo41-py27.yml
+++ b/.github/workflows/test-neo41-py27.yml
@@ -9,7 +9,7 @@ on:
jobs:
test:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["2.7"]
@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: MatteoH2O1999/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- uses: actions/setup-java@v1
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.1'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo41-py35+.yml b/.github/workflows/test-neo41-py35+.yml
index 037a3058..9a5299ae 100644
--- a/.github/workflows/test-neo41-py35+.yml
+++ b/.github/workflows/test-neo41-py35+.yml
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.1'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo42-py27.yml b/.github/workflows/test-neo42-py27.yml
index 8e7c6b8e..06fd8f12 100644
--- a/.github/workflows/test-neo42-py27.yml
+++ b/.github/workflows/test-neo42-py27.yml
@@ -9,7 +9,7 @@ on:
jobs:
test:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["2.7"]
@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: MatteoH2O1999/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- uses: actions/setup-java@v1
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.2'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo42-py35+.yml b/.github/workflows/test-neo42-py35+.yml
index b6fe189a..78a6c85f 100644
--- a/.github/workflows/test-neo42-py35+.yml
+++ b/.github/workflows/test-neo42-py35+.yml
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.2'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo43-py27.yml b/.github/workflows/test-neo43-py27.yml
index 2f59c611..ea99cadc 100644
--- a/.github/workflows/test-neo43-py27.yml
+++ b/.github/workflows/test-neo43-py27.yml
@@ -9,7 +9,7 @@ on:
jobs:
test:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["2.7"]
@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: MatteoH2O1999/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- uses: actions/setup-java@v1
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.3'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo43-py35+.yml b/.github/workflows/test-neo43-py35+.yml
index 63aef762..3330b305 100644
--- a/.github/workflows/test-neo43-py35+.yml
+++ b/.github/workflows/test-neo43-py35+.yml
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.3'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo44-py27.yml b/.github/workflows/test-neo44-py27.yml
index 1e71031a..45ccff76 100644
--- a/.github/workflows/test-neo44-py27.yml
+++ b/.github/workflows/test-neo44-py27.yml
@@ -9,7 +9,7 @@ on:
jobs:
test:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["2.7"]
@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: MatteoH2O1999/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- uses: actions/setup-java@v1
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.4'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/.github/workflows/test-neo44-py35+.yml b/.github/workflows/test-neo44-py35+.yml
index b00bc87f..ef14d9cf 100644
--- a/.github/workflows/test-neo44-py35+.yml
+++ b/.github/workflows/test-neo44-py35+.yml
@@ -28,7 +28,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
- pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Perform legal checks
@@ -42,8 +41,16 @@ jobs:
env:
NEO4J_VERSION: '4.4'
+ - name: Set up Python for coverage
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.12
+ - name: Create lcov coverage report
+ run: |
+ pip install coverage
+ coverage lcov --rcfile="test/.coveragerc"
- name: Upload coverage
- run: coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ uses: coverallsapp/github-action@master
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ path-to-lcov: coverage.lcov
diff --git a/README.rst b/README.rst
index ec3222b5..cdb71371 100644
--- a/README.rst
+++ b/README.rst
@@ -3,6 +3,9 @@ Py2neo
** **Please now consider this project to be EOL. There will be no more updates. It is recommended to use the official Neo4j drivers instead.** **
+The version history of py2neo on PyPI was lost.
+If you need an older version, you can install `py2neo-history <https://pypi.org/project/py2neo-history/>`_ instead.
+
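+For example, if your project previously pinned ``py2neo ~= 4.1.0`` (any older release works the same way), the equivalent requirement is::
+
+    pip install "py2neo-history ~= 4.1.0"
+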
.. image:: https://img.shields.io/pypi/v/py2neo.svg
:target: https://pypi.python.org/pypi/py2neo
:alt: PyPI version
@@ -11,12 +14,12 @@ Py2neo
:target: https://pypi.python.org/pypi/py2neo
:alt: PyPI Downloads
-.. image:: https://img.shields.io/github/license/technige/py2neo.svg
+.. image:: https://img.shields.io/github/license/neo4j-contrib/py2neo.svg
:target: https://www.apache.org/licenses/LICENSE-2.0
:alt: License
-.. image:: https://coveralls.io/repos/github/technige/py2neo/badge.svg?branch=master
- :target: https://coveralls.io/github/technige/py2neo?branch=master
+.. image:: https://coveralls.io/repos/github/neo4j-contrib/py2neo/badge.svg?branch=master
+ :target: https://coveralls.io/github/neo4j-contrib/py2neo?branch=master
:alt: Coverage Status
@@ -107,58 +110,58 @@ More
For more information, read the `handbook <https://py2neo.org/>`_.
-.. |test-neo44-py27| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo44-py27
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo44-py27"
+.. |test-neo44-py27| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo44-py27.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo44-py27.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.4 using py27
-.. |test-neo44-py35+| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo44-py35+
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo44-py35+"
+.. |test-neo44-py35+| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo44-py35+.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo44-py35+.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.4 using py35+
-.. |test-neo43-py27| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo43-py27
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo43-py27"
+.. |test-neo43-py27| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo43-py27.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo43-py27.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.3 using py27
-.. |test-neo43-py35+| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo43-py35+
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo43-py35+"
+.. |test-neo43-py35+| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo43-py35+.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo43-py35+.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.3 using py35+
-.. |test-neo42-py27| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo42-py27
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo42-py27"
+.. |test-neo42-py27| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo42-py27.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo42-py27.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.2 using py27
-.. |test-neo42-py35+| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo42-py35+
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo42-py35+"
+.. |test-neo42-py35+| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo42-py35+.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo42-py35+.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.2 using py35+
-.. |test-neo41-py27| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo41-py27
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo41-py27"
+.. |test-neo41-py27| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo41-py27.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo41-py27.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.1 using py27
-.. |test-neo41-py35+| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo41-py35+
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo41-py35+"
+.. |test-neo41-py35+| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo41-py35+.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo41-py35+.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.1 using py35+
-.. |test-neo40-py27| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo40-py27
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo40-py27"
+.. |test-neo40-py27| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo40-py27.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo40-py27.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.0 using py27
-.. |test-neo40-py35+| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo40-py35+
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo40-py35+"
+.. |test-neo40-py35+| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo40-py35+.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo40-py35+.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 4.0 using py35+
-.. |test-neo35-py27| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo35-py27
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo35-py27"
+.. |test-neo35-py27| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo35-py27.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo35-py27.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 3.5 using py27
-.. |test-neo35-py35+| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo35-py35+
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo35-py35+"
+.. |test-neo35-py35+| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo35-py35+.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo35-py35+.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 3.5 using py35+
-.. |test-neo34-py27| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo34-py27
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo34-py27"
+.. |test-neo34-py27| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo34-py27.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo34-py27.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 3.4 using py27
-.. |test-neo34-py35+| image:: https://img.shields.io/github/workflow/status/technige/py2neo/test-neo34-py35+
- :target: https://github.com/technige/py2neo/actions?query=workflow%3A"test-neo34-py35+"
+.. |test-neo34-py35+| image:: https://img.shields.io/github/actions/workflow/status/neo4j-contrib/py2neo/test-neo34-py35+.yml
+ :target: https://github.com/neo4j-contrib/py2neo/actions/workflows/test-neo34-py35+.yml?query=branch%3Amaster
:alt: GitHub workflow status for tests against Neo4j 3.4 using py35+
diff --git a/bin/check-legal b/bin/check-legal
index b56f1a36..2fe9d4bb 100755
--- a/bin/check-legal
+++ b/bin/check-legal
@@ -10,7 +10,8 @@ function check_license {
function check_copyright {
- YEAR=$(date +%Y)
+ #YEAR=$(date +%Y)
+ YEAR=2021 # when this project was abandoned
find "${SRC}" -type d \( -path "${SRC}/vendor" \) -prune -o -name '*.py' -print0 | xargs -0 grep -L -e "Copyright.* 20[0-9][0-9]-${YEAR}"
}
diff --git a/bin/package-history.sh b/bin/package-history.sh
new file mode 100755
index 00000000..0966999f
--- /dev/null
+++ b/bin/package-history.sh
@@ -0,0 +1,180 @@
+#!/usr/bin/env bash
+
+set -ex
+
+function prepend_readme {
+ if [ -f README ]
+ then
+ readme_fn=README
+ elif [ -f README.rst ]; then
+ readme_fn=README.rst
+ elif [ -f README.md ]; then
+ readme_fn=README.md
+ else
+ echo "README not found!" 1>&2
+ exit 1
+ fi
+ echo -n "This is a copy of the py2neo package to restore the version history that got deleted.
+It's not possible to re-upload a deleted version of a package to PyPI.
+So if you rely on, for example, \`py2neo ~= 4.1.0\` in your project, you can simply switch to \`py2neo-history ~= 4.1.0\`.
+If your project works with py2neo 2021.2.3 or above, you can keep using \`py2neo\` as usual.
+
+Note that this project will not get any updates past version 2021.2.3.
+
+" | cat - $readme_fn > README.tmp && mv README.tmp $readme_fn
+}
+
+function check_install {
+ python3.8 -m venv venv_vendor
+ . venv_vendor/bin/activate
+ pip install .
+ deactivate
+ rm -r venv_vendor
+}
+
+function check_sed_changes() {
+ if [ ! -f sed_changes.tmp ] || [ ! -s sed_changes.tmp ]
+ then
+ rm sed_changes.tmp
+ echo "sed_changes.tmp is empty!" 1>&2
+ exit 1
+ fi
+ rm sed_changes.tmp
+}
+
+function vendor_packages {
+ if grep -q -e 'pansi' setup.py
+ then
+ if ! grep -q -e 'pansi>=2020.7.3' setup.py
+ then
+ echo "found unexpected version of pansi" 1>&2
+ exit 1
+ fi
+ sed -i "s/pansi>=2020.7.3/pansi==2020.7.3/" setup.py
+ if ! grep -q -e 'six' setup.py
+ then
+ echo "pansi 2020.7.3 requires six, but py2neo doesn't, so vendoring requires extra steps" 1>&2
+ exit 1
+ fi
+ rm -rf vendor_dist
+ mkdir vendor_dist
+ python3.8 -m venv venv_vendor
+ . venv_vendor/bin/activate
+ pip install .
+
+ #sed -i "s/\(from os import .*\)/\1, getcwd/w sed_changes.tmp" setup.py
+ #check_sed_changes
+ #sed -i 's/\("packages": *find_packages(.*)\),/\1 + ["vendor_dist"],/w sed_changes.tmp' setup.py
+ #check_sed_changes
+ #sed -i 's/\( *\)\("package_data": *{\)/\1\2\n\1 "vendor_dist": ["*"],/w sed_changes.tmp' setup.py
+ #check_sed_changes
+
+ for dep in pansi
+ do
+ path=$(python -c "import $dep; print($dep.__path__[0])")
+ #if path=$(python -c "import $dep; print($dep.__path__[0])")
+ #if version=$(python -c "from importlib.metadata import version; print(version('$dep'))")
+ #then
+ #url=$(curl -s "https://pypi.org/pypi/$dep/json" | jq -r '.releases."'"$version"'" | map(select(.packagetype == "sdist"))[0].url')
+ #fn=$(echo "$url" | rev | cut -d/ -f1 | rev)
+ #curl "$url" -o "vendor_dist/$fn"
+ #if [ ! -f "vendor_dist/$fn" ] || [ ! -s "vendor_dist/$fn" ]
+ #then
+ # echo "downloaded file missing!" 1>&2
+ # exit 1
+ #fi
+ #
+ #sed -i "s|\"$dep.*\",|\"$dep @ file://localhost/{}/vendor_dist/$fn\".format(getcwd()),|w sed_changes.tmp" setup.py
+ #check_sed_changes
+ #
+ #dep_name_count=$(grep -r --include "*.py" $dep py2neo | wc -l)
+ #dep_import_count=$(grep -r --include "*.py" -E "from +$dep.* +import " py2neo | wc -l)
+ #if [ "$dep_name_count" -ne "$dep_import_count" ]
+ #then
+ # grep -r --include "*.py" $dep py2neo
+ # grep -r --include "*.py" -E "from +$dep.* +import " py2neo
+ # echo "dep_name_count = $dep_name_count != $dep_import_count = dep_import_count" 1>&2
+ # exit 1
+ #fi
+ # vendor with relative imports
+ # while IFS= read -r -d '' fn
+ # do
+ # depth=$(echo "$fn" | grep -o "/" | wc -l)
+ # dots=$(for (( i = 0; i < "$depth"; ++i )); do echo -n "."; done)
+ # sed -i "s/\(from \+\)$dep\(.*import \+\)/\1${dots}_${dep}\2/g" "$fn"
+ # done < <(find py2neo -type f -name '*.py' -print0)
+ # vendor with absolute imports
+ find py2neo -type f -name '*.py' -print0 | xargs -0 sed -i "s/\(from \+\)$dep\(.*import \+\)/\1py2neo.vendor.$dep\2/g"
+ grep -r --include "*.py" "$dep" py2neo
+ echo "manually check this grep output!"
+ sed -i "/\"$dep/d" setup.py
+ mkdir -p py2neo/vendor
+ touch py2neo/vendor/__init__.py
+ rm -rf py2neo/vendor/$dep
+ cp -r $path py2neo/vendor/$dep
+ #fi
+ done
+ #deactivate
+ #rm -r venv_vendor
+ fi
+}
+
+# releases with version and package hard-coded py2neo/__init__.py
+for tag in release/1.6.2 release/1.6.3 py2neo-2.0 py2neo-2.0.1 py2neo-2.0.2 py2neo-2.0.3 py2neo-2.0.4 py2neo-2.0.5 py2neo-2.0.6 py2neo-2.0.7 py2neo-2.0.8 py2neo-2.0.9 py2neo-3.0.0 py2neo-3.1.0 py2neo-3.1.1 py2neo-3.1.2
+do
+ export PATCHED_VERSION=$tag
+ git checkout $tag
+ prepend_readme
+ check_install
+ sed -i "s/\"name\": .*,/\"name\": \"py2neo-history\",/" setup.py
+ sed -i "s/name=.*,/name=\"py2neo-history\",/" setup.py
+ python setup.py sdist
+ git checkout -- .
+done
+
+# releases with version and package hard-coded py2neo/meta.py
+for tag in py2neo-4.0.0b1 py2neo-4.0.0b2 py2neo-4.0.0 py2neo-4.1.0 py2neo-4.1.1 py2neo-4.1.2 py2neo-4.1.3 py2neo-4.3.0
+do
+ export PATCHED_VERSION=$tag
+ git checkout $tag
+ prepend_readme
+ check_install
+ sed -i "s/\"name\": .*,/\"name\": \"py2neo-history\",/" setup.py
+ python setup.py sdist
+ git checkout -- py2neo/meta.py
+ git checkout -- .
+done
+
+# releases with dev version (ending in .dev0) and package hard-coded py2neo/__init__.py
+for tag in 5.0b2 5.0b3 5.0b4 5.0b5 2020.7b6
+do
+ export PATCHED_VERSION=$tag
+ git checkout $tag
+ prepend_readme
+ check_install
+ sed -i "s/\"name\": .*,/\"name\": \"py2neo-history\",/" setup.py
+ sed -i "s/__version__ = .*/__version__ = \"$tag\"/" py2neo/__init__.py
+ python setup.py sdist
+ git checkout -- .
+done
+
+# releases with dummy version loaded from VERSION file
+for tag in 2020.0b9 2020.0rc1 2020.0.0 2020.1a1 2020.1.0 2020.1.1 2020.7b7 2020.7b8 2021.0.0 2021.0.1 2021.1.0 2021.1.1 2021.1.2 2021.1.3 2021.1.4 2021.1.5 2021.2.0 2021.2.1 2021.2.2 2021.2.3
+do
+ export PATCHED_VERSION=$tag
+ git checkout $tag
+ vendor_packages
+ prepend_readme
+ sed -i "s/PACKAGE_NAME = .*/PACKAGE_NAME = \"py2neo-history\"/" py2neo/meta.py
+ echo -n $tag > py2neo/VERSION
+ python setup.py sdist
+ git checkout -- .
+done
+
+# now call
+#twine upload dist/py2neo-history-*.tar.gz
+
+# clean-up for development
+#git checkout -- .; rm -r dist sed_changes.tmp vendor_dist interchange py2neo/interchange py2neo/_interchange py2neo/vendor/interchange pansi py2neo/pansi py2neo/_pansi py2neo/vendor/pansi; rm -r venv_vendor/
+# test sdist for development
+#deactivate; rm -r venv_tmp/; virtualenv -p 35 venv_tmp; source venv_tmp/bin/activate.fish; pip install dist/py2neo-history-2020.0b9.tar.gz
diff --git a/bin/test b/bin/test
index e0d0c9dd..023747bd 100755
--- a/bin/test
+++ b/bin/test
@@ -51,4 +51,3 @@ run_unit_tests
run_integration_tests
coverage report --rcfile="${COVERAGERC}"
-coverage xml --rcfile="${COVERAGERC}"
diff --git a/test/integration/conftest.py b/test/integration/conftest.py
index dfffd8af..b40447b2 100644
--- a/test/integration/conftest.py
+++ b/test/integration/conftest.py
@@ -134,6 +134,9 @@ def generate_uri(self, service_name=None):
elif NEO4J_VERSION == "4.x":
neo4j_deployment_profiles = [profile for profile in neo4j_deployment_profiles
if profile.release[0] == 4]
+elif NEO4J_VERSION == "4.4":
+ neo4j_deployment_profiles = [profile for profile in neo4j_deployment_profiles
+ if profile.release == (4, 4)]
elif NEO4J_VERSION == "4.3":
neo4j_deployment_profiles = [profile for profile in neo4j_deployment_profiles
if profile.release == (4, 3)]
diff --git a/test/requirements.txt b/test/requirements.txt
index 554fb0b1..39e9db4d 100644
--- a/test/requirements.txt
+++ b/test/requirements.txt
@@ -1,5 +1,6 @@
+. # py2neo
+./test/vendor/grolt
+
coverage
-coveralls
-grolt>=1.0.6
pytest
pytest-threadleak
diff --git a/test/vendor/grolt/.coveragerc b/test/vendor/grolt/.coveragerc
new file mode 100644
index 00000000..b8fd87dd
--- /dev/null
+++ b/test/vendor/grolt/.coveragerc
@@ -0,0 +1,26 @@
+[run]
+branch = True
+source = boltkit
+omit =
+ **/__main__.py
+
+[report]
+# Regexes for lines to exclude from consideration
+exclude_lines =
+ # Have to re-enable the standard pragma
+ pragma: no cover
+
+ # Don't complain about missing debug-only code:
+ def __repr__
+ if self\.debug
+
+ # Don't complain if tests don't hit defensive assertion code:
+ raise AssertionError
+ raise NotImplementedError
+
+ # Don't complain if non-runnable code isn't run:
+ if 0:
+ if __name__ == .__main__.:
+
+ignore_errors = True
+show_missing = True
diff --git a/test/vendor/grolt/.gitignore b/test/vendor/grolt/.gitignore
new file mode 100644
index 00000000..27d8c202
--- /dev/null
+++ b/test/vendor/grolt/.gitignore
@@ -0,0 +1,15 @@
+*~
+*.py[co]
+__pycache__
+.pytest_cache
+.idea
+
+*.egg-info
+dist
+build
+
+neo4j-community-*
+neo4j-enterprise-*
+
+.coverage
+.tox
diff --git a/test/vendor/grolt/LICENSE b/test/vendor/grolt/LICENSE
new file mode 100644
index 00000000..d6456956
--- /dev/null
+++ b/test/vendor/grolt/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/test/vendor/grolt/MANIFEST.in b/test/vendor/grolt/MANIFEST.in
new file mode 100644
index 00000000..cc47ef17
--- /dev/null
+++ b/test/vendor/grolt/MANIFEST.in
@@ -0,0 +1,3 @@
+include LICENSE NOTICE
+exclude README.md
+recursive-exclude test *
diff --git a/test/vendor/grolt/NOTICE b/test/vendor/grolt/NOTICE
new file mode 100644
index 00000000..7a395a5c
--- /dev/null
+++ b/test/vendor/grolt/NOTICE
@@ -0,0 +1,2 @@
+grolt
+Copyright (c) 2011-2021 Nigel Small
diff --git a/test/vendor/grolt/README.md b/test/vendor/grolt/README.md
new file mode 100644
index 00000000..b5d60ad8
--- /dev/null
+++ b/test/vendor/grolt/README.md
@@ -0,0 +1,73 @@
+# Grolt
+
+Grolt is an interactive Docker-based tool for running Neo4j servers and clusters in development.
+
+**Please ensure that you have an appropriate license for running Neo4j**
+
+
+## Usage
+
+To install Grolt, simply run:
+```
+pip install grolt
+```
+
+The primary interface is through the CLI, so to see the options available, use:
+```
+grolt --help
+```
+
+To start a single standalone instance with the latest Neo4j release, use:
+
+```
+grolt
+```
+
+To start a 3-core cluster, use the command:
+
+```
+grolt -c 3
+```
+
+To start a 3-core cluster with additional configuration options, use:
+
+```
+grolt -c 3 -C dbms.default_database=mygraph
+```
+
+## Interacting with containers via docker
+
+To view the running docker containers:
+```
+docker ps
+```
+
+To tail the main neo4j logs of an instance:
+```
+docker logs -f <container>
+```
+
+To tail the debug logs of an instance:
+```
+docker exec <container> tail -f /logs/debug.log
+```
+
+To pause and unpause an instance:
+```
+docker pause <container>
+docker unpause <container>
+```
+
+## Running your latest code using grolt
+
+First, you have to build neo4j tarballs:
+```
+cd <neo4j repo>
+mvn package -DskipTests -Dcheckstyle.skip -Dlicense.skip -Dlicensing.skip -Denforcer.skip -T2C
+```
+
+Then you can run grolt with them
+```
+cd <neo4j repo>
+grolt -c 3 --neo4j-source-dir "$(pwd)" --user "$(whoami)"
+```
diff --git a/test/vendor/grolt/grolt/__init__.py b/test/vendor/grolt/grolt/__init__.py
new file mode 100644
index 00000000..ca13cb7d
--- /dev/null
+++ b/test/vendor/grolt/grolt/__init__.py
@@ -0,0 +1,1021 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+# Copyright 2011-2021, Nigel Small
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from __future__ import division
+
+from collections import namedtuple
+from inspect import getmembers
+from logging import getLogger
+from math import ceil
+from os import makedirs
+from os.path import isdir, join as path_join
+from random import choice
+from shlex import split as shlex_split
+from threading import Thread
+from time import sleep
+from xml.etree import ElementTree
+from webbrowser import open as open_browser
+
+import click
+from click import BadParameter, ClickException
+from docker import DockerClient
+from docker.errors import APIError, ImageNotFound
+from monotonic import monotonic
+from py2neo import ServiceProfile, ConnectionProfile, ConnectionUnavailable
+from py2neo.addressing import Address
+from py2neo.client import Connector, Connection
+from packaging.version import InvalidVersion
+
+from six.moves import input
+
+from grolt.images import is_legacy_image, resolve_image
+from grolt.security import Auth, make_auth
+
+
+docker = DockerClient.from_env(version="auto")
+
+log = getLogger(__name__)
+
+debug_opts_type = namedtuple("debug_opts_type", ["suspend", "port"])
+
+
+def port_range(base_port, count):
+ if base_port:
+ return list(range(base_port, base_port + count))
+ else:
+ return [0] * count
+
+
+class Neo4jDirectorySpec(object):
+
+ def __init__(self,
+ certificates_dir=None,
+ import_dir=None,
+ logs_dir=None,
+ plugins_dir=None,
+ shared_dirs=None,
+ neo4j_source_dir=None,
+ ):
+ self.certificates_dir = certificates_dir
+ self.import_dir = import_dir
+ self.logs_dir = logs_dir
+ self.plugins_dir = plugins_dir
+ self.shared_dirs = shared_dirs
+ self.neo4j_source_dir = neo4j_source_dir
+
+ def volumes(self, name):
+ volumes = {}
+ if self.certificates_dir:
+ # The certificate directory needs to be shared as rw to
+ # allow Neo4j to perform 'chown'.
+ log.debug("Sharing directory %r for certificates (rw)", self.certificates_dir)
+ volumes[self.certificates_dir] = {
+ "bind": "/var/lib/neo4j/certificates",
+ "mode": "rw",
+ }
+ if self.import_dir:
+ log.debug("Sharing directory %r for imports (ro)", self.import_dir)
+ volumes[self.import_dir] = {
+ "bind": "/var/lib/neo4j/import",
+ "mode": "ro",
+ }
+ if self.logs_dir:
+ log.debug("Sharing directory %r for logs (rw)", self.logs_dir)
+ volumes[path_join(self.logs_dir, name)] = {
+ "bind": "/var/lib/neo4j/logs",
+ "mode": "rw",
+ }
+ if self.plugins_dir:
+ log.debug("Sharing directory %r for plugins (ro)", self.plugins_dir)
+ volumes[self.plugins_dir] = {
+ "bind": "/plugins",
+ "mode": "ro",
+ }
+ if self.shared_dirs:
+ for shared_dir in self.shared_dirs:
+ log.debug("Sharing directory %r as %r", shared_dir.source, shared_dir.destination)
+ volumes[shared_dir.source] = {
+ "bind": shared_dir.destination,
+ "mode": "rw",
+ }
+ if self.neo4j_source_dir:
+ pom = ElementTree.parse(self.neo4j_source_dir + "/pom.xml").getroot()
+ xml_tag_prefix = pom.tag.split("project")[0]
+ neo4j_version = pom.find(xml_tag_prefix+"version").text
+ lib_dir = ("{}/private/packaging/standalone/target/"
+ "neo4j-enterprise-{}-unix/neo4j-enterprise-{}/"
+ "lib".format(self.neo4j_source_dir, neo4j_version, neo4j_version))
+ bin_dir = ("{}/private/packaging/standalone/target/"
+ "neo4j-enterprise-{}-unix/neo4j-enterprise-{}/"
+ "bin".format(self.neo4j_source_dir, neo4j_version, neo4j_version))
+ if not isdir(lib_dir):
+ raise Exception("Could not find packaged neo4j source at {}\n"
+ "Perhaps you need to run `mvn package`?".format(lib_dir))
+
+ volumes[lib_dir] = {
+ "bind": "/var/lib/neo4j/lib/",
+ "mode": "ro",
+ }
+ volumes[bin_dir] = {
+ "bind": "/var/lib/neo4j/bin/",
+ "mode": "ro",
+ }
+
+ return volumes
+
+
+class Neo4jMachineSpec(object):
+ # Base config for all machines. This can be overridden by
+ # individual instances.
+ config = {
+ "dbms.backup.enabled": "false",
+ "dbms.transaction.bookmark_ready_timeout": "5s",
+ }
+
+ discovery_port = 5000
+ transaction_port = 6000
+ raft_port = 7000
+ debug_port = 5100
+ bolt_internal_port = 7688
+
+ def __init__(self, name, service_name, image,
+ bolt_port, http_port, https_port, debug_opts,
+ dir_spec, config, env):
+ self.name = name
+ self.service_name = service_name
+ self.image = image
+ self.bolt_port = bolt_port
+ self.http_port = http_port
+ self.https_port = https_port
+ self.dir_spec = dir_spec
+ self.debug_opts = debug_opts
+ self.env = dict(env or {})
+ self.config = dict(self.config or {})
+ if debug_opts.port:
+ self._add_debug_opts(debug_opts)
+ self.config["dbms.connector.bolt.advertised_address"] = \
+ "localhost:{}".format(self.bolt_port)
+ self.config["dbms.connector.http.advertised_address"] = \
+ "localhost:{}".format(self.http_port)
+ self.config["dbms.connector.https.advertised_address"] = \
+ "localhost:{}".format(self.https_port)
+ self.config["dbms.routing.advertised_address"] = \
+ self.bolt_internal_address
+ if self.dir_spec and self.dir_spec.certificates_dir and not is_legacy_image(self.image):
+ self.config.update({
+ "dbms.ssl.policy.bolt.enabled": True,
+ "dbms.ssl.policy.https.enabled": True,
+ "dbms.connector.bolt.tls_level": "OPTIONAL",
+ "dbms.connector.https.enabled": True,
+ })
+ if config:
+ self.config.update(**config)
+
+ def __hash__(self):
+ return hash((self.name, self.service_name))
+
+ @property
+ def dbms_mode(self):
+ return self.config.get("dbms.mode")
+
+ @property
+ def fq_name(self):
+ return "{}.{}".format(self.name, self.service_name)
+
+ @property
+ def discovery_address(self):
+ return "{}:{}".format(self.fq_name, self.discovery_port)
+
+ @property
+ def bolt_internal_address(self):
+ return "{}:{}".format(self.fq_name, self.bolt_internal_port)
+
+ def _add_debug_opts(self, debug_opts):
+ if debug_opts.port is not None:
+ suspend = "y" if debug_opts.suspend else "n"
+ self.env["JAVA_TOOL_OPTIONS"] = (
+ "-agentlib:jdwp=transport=dt_socket,server=y,"
+ "suspend={},address=*:{}".format(suspend, self.debug_port)
+ )
+
+
+class Neo4jMachine(object):
+ """ A single Neo4j server instance, potentially part of a cluster.
+ """
+
+ container = None
+
+ ip_address = None
+
+ ready = 0
+
+ def __init__(self, spec, image, auth, user):
+ self.spec = spec
+ self.image = image
+ self.address = Address(("localhost", self.spec.bolt_port))
+ self.auth = auth
+ self.profiles = {
+ "bolt": ConnectionProfile(scheme="bolt", port=self.spec.bolt_port, auth=self.auth),
+ "http": ConnectionProfile(scheme="http", port=self.spec.http_port, auth=self.auth),
+ "https": ConnectionProfile(scheme="https", port=self.spec.https_port, auth=self.auth),
+ }
+ environment = {}
+ if self.auth:
+ environment["NEO4J_AUTH"] = "/".join(self.auth)
+ environment["NEO4J_ACCEPT_LICENSE_AGREEMENT"] = "yes"
+ for key, value in self.spec.config.items():
+ fixed_key = "NEO4J_" + key.replace("_", "__").replace(".", "_")
+ environment[fixed_key] = value
+ for key, value in self.spec.env.items():
+ environment[key] = value
+ ports = {"7474/tcp": self.spec.http_port,
+ "7473/tcp": self.spec.https_port,
+ "7687/tcp": self.spec.bolt_port}
+ if self.spec.debug_opts.port is not None:
+ ports["5100/tcp"] = self.spec.debug_opts.port
+ if self.spec.dir_spec:
+ volumes = self.spec.dir_spec.volumes(self.spec.name)
+ for path in volumes:
+ try:
+ makedirs(path)
+ except OSError:
+ pass
+ else:
+ volumes = None
+ try:
+ user = int(user)
+ except TypeError:
+ user = None
+ except ValueError:
+ # Note: this will only work on Unix.
+ from pwd import getpwnam
+ user = getpwnam(user).pw_uid
+
+ def create_container(img):
+ return docker.containers.create(
+ img,
+ detach=True,
+ environment=environment,
+ hostname=self.spec.fq_name,
+ name=self.spec.fq_name,
+ network=self.spec.service_name,
+ ports=ports,
+ user=user,
+ volumes=volumes,
+ )
+
+ try:
+ self.container = create_container(self.image)
+ except ImageNotFound:
+ log.info("Downloading Docker image %r", self.image)
+ docker.images.pull(self.image)
+ self.container = create_container(self.image)
+
+ def __hash__(self):
+ return hash(self.container)
+
+ def __repr__(self):
+        return "{}(fq_name={!r}, image={!r}, address={!r})".format(
+            self.__class__.__name__, self.spec.fq_name,
+            self.image, self.address)
+
+ def start(self):
+ log.info("Starting machine %r at "
+ "«%s»", self.spec.fq_name, self.address)
+ try:
+ self.container.start()
+ self.container.reload()
+ self.ip_address = (self.container.attrs["NetworkSettings"]
+ ["Networks"][self.spec.service_name]["IPAddress"])
+ except APIError as error:
+ log.info(error)
+
+ log.debug(u"Machine %r has internal IP address "
+ u"«%s»", self.spec.fq_name, self.ip_address)
+
+ def restart(self):
+ log.info("Restarting machine %r at "
+ "«%s»", self.spec.fq_name, self.address)
+ try:
+ self.container.restart()
+ self.container.reload()
+ self.ip_address = (self.container.attrs["NetworkSettings"]
+ ["Networks"][self.spec.service_name]["IPAddress"])
+ except APIError as error:
+ log.info(error)
+
+ log.debug("Machine %r has internal IP address "
+ "«%s»", self.spec.fq_name, self.ip_address)
+
+ def _poll_connection(self, port_name, timeout=0):
+        """ Repeatedly attempt to open a connection to this machine on the
+        named port ("bolt", "http" or "https").
+ """
+ t0 = monotonic()
+ profile = self.profiles[port_name]
+ log.debug("Trying to open connection to %s", profile)
+ errors = set()
+ again = True
+ wait = 0.1
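+        # Keep retrying with exponential back-off (0.1s, 0.2s, 0.4s, ...) until
+        # the timeout has elapsed.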
+ while again:
+ try:
+ cx = Connection.open(profile)
+ except InvalidVersion as e:
+ log.info("Encountered invalid Neo4j version '%s'. Continuing anyway (this is a dev tool)", e)
+ return None
+ except ConnectionUnavailable as e:
+ errors.add(" ".join(map(str, e.args)))
+ else:
+ if cx:
+ return cx
+ again = monotonic() - t0 < (timeout or 0)
+ if again:
+ sleep(wait)
+ wait *= 2
+ log.error("Could not open connection to %s (%r)", profile, errors)
+ raise ConnectionUnavailable("Could not open connection")
+
+ def ping(self, timeout):
+ try:
+ cx = self._poll_connection("bolt", timeout=timeout)
+ if cx is not None:
+ cx.close()
+ cx = self._poll_connection("http", timeout=timeout)
+ if cx is not None:
+ cx.close()
+ log.info("Machine {!r} available".format(self.spec.fq_name))
+ except ConnectionUnavailable:
+ log.info("Machine {!r} unavailable".format(self.spec.fq_name))
+
+ def await_started(self, timeout):
+ sleep(1)
+ self.container.reload()
+ if self.container.status == "running":
+ try:
+ self.ping(timeout)
+ except OSError:
+ self.container.reload()
+ state = self.container.attrs["State"]
+ if state["Status"] == "exited":
+ self.ready = -1
+ log.error("Machine %r exited with code %r",
+ self.spec.fq_name, state["ExitCode"])
+ for line in self.container.logs().splitlines():
+ log.error("> %s" % line.decode("utf-8"))
+ else:
+ log.error("Machine %r did not become available "
+ "within %rs", self.spec.fq_name, timeout)
+ else:
+ self.ready = 1
+ else:
+ log.error("Machine %r is not running (status=%r)",
+ self.spec.fq_name, self.container.status)
+ for line in self.container.logs().splitlines():
+ log.error("> %s" % line.decode("utf-8"))
+
+ def stop(self, timeout=None):
+ log.info("Stopping machine %r", self.spec.fq_name)
+ self.container.stop(timeout=timeout)
+ self.container.remove(force=True)
+
+ def uri(self, scheme):
+ """ Return a URI targeting this machine for a given URI scheme.
+ """
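+        # e.g. uri("bolt") -> "bolt://localhost:7687" for a standalone machine
+        # using the default port assignments.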
+ if scheme in ("neo4j", "neo4j+s", "neo4j+ssc", "bolt", "bolt+s", "bolt+ssc"):
+ port = self.spec.bolt_port
+ elif scheme == "http":
+ port = self.spec.http_port
+ elif scheme in ("https", "http+s", "http+ssc"):
+ port = self.spec.https_port
+ else:
+            raise ValueError("Unsupported URI scheme %r" % scheme)
+ return "{}://localhost:{}".format(scheme, port)
+
+
+class Neo4jService(object):
+ """ A Neo4j database management service.
+ """
+
+ default_image = NotImplemented
+
+ default_bolt_port = 7687
+ default_http_port = 7474
+ default_https_port = 7473
+ default_debug_port = 5005
+
+ def __new__(cls, name=None, image=None, auth=None, user=None,
+ n_cores=None, n_replicas=None,
+ bolt_port=None, http_port=None, https_port=None,
+ debug_port=None, debug_suspend=None,
+ dir_spec=None, config=None, env=None):
+ if n_cores:
+ return object.__new__(Neo4jClusterService)
+ else:
+ return object.__new__(Neo4jStandaloneService)
+
+ @classmethod
+ def _random_name(cls):
+ return "".join(choice("bcdfghjklmnpqrstvwxz") for _ in range(7))
+
+ # noinspection PyUnusedLocal
+ def __init__(self, name=None, image=None, auth=None, user=None,
+ n_cores=None, n_replicas=None,
+ bolt_port=None, http_port=None, https_port=None,
+ debug_port=None, debug_suspend=None, dir_spec=None,
+ config=None, env=None):
+ self.name = name or self._random_name()
+ self.image = resolve_image(image or self.default_image)
+ self.auth = Auth(*auth) if auth else make_auth()
+ if self.auth.user != "neo4j":
+ raise ValueError("Auth user must be 'neo4j' or empty")
+ self.user = user
+ self.machines = {}
+ self.network = None
+ self.console = None
+
+ def __enter__(self):
+ try:
+ self.start(timeout=300)
+ except KeyboardInterrupt:
+ self.stop(timeout=300)
+ raise
+ else:
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.stop()
+
+ def boot(self):
+ for spec, machine in self.machines.items():
+ if machine is None:
+ self.machines[spec] = Neo4jMachine(spec, self.image, self.auth, self.user)
+
+ def routers(self):
+ return list(self.machines.values())
+
+ def _for_each_machine(self, f):
+ threads = []
+ for spec, machine in self.machines.items():
+            thread = Thread(target=f, args=(machine,))
+ thread.daemon = True
+ thread.start()
+ threads.append(thread)
+ for thread in threads:
+ thread.join()
+
+ def start(self, timeout=None):
+ log.info("Starting service %r with image %r", self.name, self.image)
+ self.network = docker.networks.create(self.name)
+        self._for_each_machine(lambda machine: machine.start())
+ if timeout is not None:
+ self.await_started(timeout)
+
+ def await_started(self, timeout):
+
+ def wait(machine):
+ machine.await_started(timeout=timeout)
+
+ self._for_each_machine(wait)
+ if all(machine.ready == 1 for spec, machine in self.machines.items()):
+ log.info("Service %r available", self.name)
+ else:
+ raise RuntimeError(("Service %r unavailable - "
+ "some machines failed") % self.name)
+
+ def stop(self, timeout=None):
+ log.info("Stopping service %r", self.name)
+
+ def _stop(machine):
+ machine.stop(timeout)
+
+ self._for_each_machine(_stop)
+ if self.network:
+ self.network.remove()
+
+ def run_console(self):
+ self.console = Neo4jConsole(self)
+ self.console.invoke("env")
+ self.console.run()
+
+ def env(self):
+ auth = "{}:{}".format(self.auth.user, self.auth.password)
+ return {
+ "BOLT_SERVER_ADDR": " ".join(str(router.address) for router in self.routers()),
+ "NEO4J_AUTH": auth,
+ }
+
+
+class Neo4jStandaloneService(Neo4jService):
+ default_image = "neo4j:latest"
+
+ def __init__(self, name=None, image=None, auth=None, user=None,
+ n_cores=None, n_replicas=None,
+ bolt_port=None, http_port=None, https_port=None, debug_port=None,
+ debug_suspend=None, dir_spec=None, config=None, env=None):
+        super(Neo4jStandaloneService, self).__init__(name, image, auth, user, n_cores, n_replicas,
+                                                     bolt_port, http_port, https_port, debug_port,
+                                                     debug_suspend, dir_spec, config, env)
+ spec = Neo4jMachineSpec(
+ name="a",
+ service_name=self.name,
+ image=self.image,
+ bolt_port=bolt_port or self.default_bolt_port,
+ http_port=http_port or self.default_http_port,
+ https_port=https_port or self.default_https_port,
+ debug_opts=debug_opts_type(debug_suspend, debug_port),
+ dir_spec=dir_spec,
+ config=config,
+ env=env,
+ )
+ self.machines[spec] = None
+ self.boot()
+
+
+class Neo4jClusterService(Neo4jService):
+ default_image = "neo4j:enterprise"
+
+ # The minimum and maximum number of cores permitted
+ min_cores = 3
+ max_cores = 7
+
+ # The minimum and maximum number of read replicas permitted
+ min_replicas = 0
+ max_replicas = 9
+
+ default_bolt_port = 17601
+ default_http_port = 17401
+ default_https_port = 17301
+ default_debug_port = 15001
+
+ def __init__(self, name=None, image=None, auth=None, user=None,
+ n_cores=None, n_replicas=None,
+ bolt_port=None, http_port=None, https_port=None, debug_port=None,
+ debug_suspend=None, dir_spec=None, config=None, env=None):
+ super(Neo4jClusterService, self).__init__(name, image, auth, user, n_cores, n_replicas,
+ bolt_port, http_port, https_port, debug_port,
+ debug_suspend, dir_spec, config, env)
+ n_cores = n_cores or self.min_cores
+ n_replicas = n_replicas or self.min_replicas
+ if not self.min_cores <= n_cores <= self.max_cores:
+            raise ValueError("A cluster must have between {} and {} "
+                             "cores".format(self.min_cores, self.max_cores))
+ if not self.min_replicas <= n_replicas <= self.max_replicas:
+            raise ValueError("A cluster must have between {} and {} "
+                             "read replicas".format(self.min_replicas,
+                                                    self.max_replicas))
+
+ core_bolt_port_range = port_range(
+ bolt_port or self.default_bolt_port, self.max_cores)
+ core_http_port_range = port_range(
+ http_port or self.default_http_port, self.max_cores)
+ core_https_port_range = port_range(
+ https_port or self.default_https_port, self.max_cores)
+ core_debug_port_range = port_range(debug_port, self.max_cores)
+ self.free_core_machine_specs = [
+ Neo4jMachineSpec(
+ name=chr(97 + i),
+ service_name=self.name,
+ image=self.image,
+ bolt_port=core_bolt_port_range[i],
+ http_port=core_http_port_range[i],
+ https_port=core_https_port_range[i],
+                # Only suspend the first core in the cluster; otherwise the
+                # cluster won't form until debuggers attach to all of them.
+ debug_opts=debug_opts_type(debug_suspend if i == 0 else False,
+ core_debug_port_range[i]),
+ dir_spec=dir_spec,
+ config=dict(config or {}, **{
+ "dbms.mode": "CORE",
+ "causal_clustering.minimum_core_cluster_size_at_formation":
+ n_cores or self.min_cores,
+ "causal_clustering.minimum_core_cluster_size_at_runtime":
+ self.min_cores,
+ }),
+ env=env,
+ )
+ for i in range(self.max_cores)
+ ]
+ replica_bolt_port_range = port_range(
+ ceil(core_bolt_port_range[-1] / 10) * 10 + 1, self.max_replicas)
+ replica_http_port_range = port_range(
+ ceil(core_http_port_range[-1] / 10) * 10 + 1, self.max_replicas)
+ replica_https_port_range = port_range(
+ ceil(core_https_port_range[-1] / 10) * 10 + 1, self.max_replicas)
+ if debug_port:
+ replica_debug_port_range = port_range(
+ ceil(core_debug_port_range[-1] / 10) * 10 + 1, self.max_replicas)
+ else:
+ replica_debug_port_range = port_range(None, self.max_replicas)
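+        # Assuming port_range allocates consecutive ports from its base value,
+        # the default cluster uses Bolt ports 17601-17607 for cores and
+        # 17611-17619 for read replicas (the next multiple of ten, plus one).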
+ self.free_replica_machine_specs = [
+ Neo4jMachineSpec(
+ name=chr(49 + i),
+ service_name=self.name,
+ image=self.image,
+ bolt_port=replica_bolt_port_range[i],
+ http_port=replica_http_port_range[i],
+ https_port=replica_https_port_range[i],
+                # As with the cores, only the first replica honours the
+                # debug_suspend flag, so the others can start without waiting
+                # for a debugger to attach.
+ debug_opts=debug_opts_type(debug_suspend if i == 0 else False,
+ replica_debug_port_range[i]),
+ dir_spec=dir_spec,
+ config=dict(config or {}, **{
+ "dbms.mode": "READ_REPLICA",
+ }),
+ env=env,
+ )
+ for i in range(self.max_replicas)
+ ]
+
+ # Add core machine specs
+ for i in range(n_cores or self.min_cores):
+ spec = self.free_core_machine_specs.pop(0)
+ self.machines[spec] = None
+ # Add replica machine specs
+ for i in range(n_replicas or self.min_replicas):
+ spec = self.free_replica_machine_specs.pop(0)
+ self.machines[spec] = None
+
+ self.boot()
+
+ def boot(self):
+ discovery_addresses = [spec.discovery_address for spec in self.machines
+ if spec.dbms_mode == "CORE"]
+ log.debug("Discovery addresses set to %r" % discovery_addresses)
+ for spec, machine in self.machines.items():
+ if machine is None:
+ spec.config.update({
+ "causal_clustering.initial_discovery_members":
+ ",".join(discovery_addresses),
+ })
+ self.machines[spec] = Neo4jMachine(spec, self.image, self.auth, self.user)
+
+ def cores(self):
+ return [machine for spec, machine in self.machines.items()
+ if spec.dbms_mode == "CORE"]
+
+ def replicas(self):
+ return [machine for spec, machine in self.machines.items()
+ if spec.dbms_mode == "READ_REPLICA"]
+
+ def routers(self):
+ return list(self.cores())
+
+ def run_console(self):
+ self.console = Neo4jClusterConsole(self)
+ self.console.run()
+
+ def add_core(self):
+        """ Add a new core server.
+ """
+ if len(self.cores()) < self.max_cores:
+ spec = self.free_core_machine_specs.pop(0)
+ self.machines[spec] = None
+ self.boot()
+ self.machines[spec].start()
+ self.machines[spec].await_started(300)
+ else:
+ raise RuntimeError("A maximum of {} cores "
+ "is permitted".format(self.max_cores))
+
+ def add_replica(self):
+        """ Add a new read replica server.
+ """
+ if len(self.replicas()) < self.max_replicas:
+ spec = self.free_replica_machine_specs.pop(0)
+ self.machines[spec] = None
+ self.boot()
+ self.machines[spec].start()
+ self.machines[spec].await_started(300)
+ else:
+ raise RuntimeError("A maximum of {} replicas "
+ "is permitted".format(self.max_replicas))
+
+ def _remove_machine(self, spec):
+ machine = self.machines[spec]
+ del self.machines[spec]
+ machine.stop()
+ if spec.dbms_mode == "CORE":
+ self.free_core_machine_specs.append(spec)
+ elif spec.dbms_mode == "READ_REPLICA":
+ self.free_replica_machine_specs.append(spec)
+
+ def remove(self, name):
+ """ Remove a server by name (e.g. 'a', 'a.fbe340d').
+ """
+ found = 0
+ for spec, machine in list(self.machines.items()):
+ if name in (spec.name, spec.fq_name):
+ self._remove_machine(spec)
+ found += 1
+ return found
+
+ def reboot(self, name):
+ found = 0
+ for spec, machine in list(self.machines.items()):
+ if name in (spec.name, spec.fq_name):
+ machine.restart()
+ machine.await_started(300)
+ found += 1
+ return found
+
+
+class Neo4jConsole(object):
+
+ args = None
+
+ def __init__(self, service):
+ self.service = service
+
+ def __iter__(self):
+ for name, value in getmembers(self):
+ if isinstance(value, click.Command):
+ yield name
+
+ def __getitem__(self, name):
+ try:
+ f = getattr(self, name)
+ except AttributeError:
+ raise BadParameter('No such command "%s".' % name)
+ else:
+ if isinstance(f, click.Command):
+ return f
+ else:
+ raise BadParameter('No such command "%s".' % name)
+
+ def _iter_machines(self, name):
+ if not name:
+ name = "a"
+ for spec in list(self.service.machines):
+ if name in (spec.name, spec.fq_name):
+ yield self.service.machines[spec]
+
+ def _for_each_machine(self, name, f):
+ found = 0
+ for machine_obj in self._iter_machines(name):
+ f(machine_obj)
+ found += 1
+ return found
+
+ def prompt(self):
+ # We don't use click.prompt functionality here as that doesn't play
+ # nicely with readline. Instead, we use click.echo for the main prompt
+ # text and a raw input call to read from stdin.
+ text = "".join([
+ click.style(self.service.name, fg="green"),
+ click.style(">"),
+ ])
+ prompt_suffix = " "
+ click.echo(text, nl=False)
+ return input(prompt_suffix)
+
+ def run(self):
+ while True:
+ text = self.prompt()
+ if text:
+ self.args = shlex_split(text)
+ self.invoke(*self.args)
+
+ def invoke(self, *args):
+ try:
+ arg0, args = args[0], list(args[1:])
+ f = self[arg0]
+ ctx = f.make_context(arg0, args, obj=self)
+ return f.invoke(ctx)
+ except click.exceptions.Exit:
+ pass
+ except ClickException as error:
+ click.echo(error.format_message(), err=True)
+
+ @click.command()
+ @click.argument("machine", required=False)
+ @click.pass_obj
+ def browser(self, machine):
+ """ Start the Neo4j browser.
+
+ A machine name may optionally be passed, which denotes the server to
+ which the browser should be tied. If no machine name is given, 'a' is
+ assumed.
+ """
+
+ def f(m):
+ http_uri = m.uri("http")
+ click.echo("Opening web browser for machine {!r} at "
+ "«{}»".format(m.spec.fq_name, http_uri))
+ open_browser(http_uri)
+
+ if not self._for_each_machine(machine, f):
+ raise BadParameter("Machine {!r} not found".format(machine))
+
+ @click.command()
+ @click.pass_obj
+ def env(self):
+ """ Show available environment variables.
+
+ Each service exposes several environment variables which contain
+ information relevant to that service. These are:
+
+ BOLT_SERVER_ADDR space-separated string of router addresses
+ NEO4J_AUTH colon-separated user and password
+
+ """
+ for key, value in sorted(self.service.env().items()):
+ click.echo("%s=%r" % (key, value))
+
+ @click.command()
+ @click.pass_obj
+ def exit(self):
+ """ Shutdown all machines and exit the console.
+ """
+ raise SystemExit()
+
+ @click.command()
+ @click.argument("command", required=False)
+ @click.pass_obj
+ def help(self, command):
+ """ Get help on a command or show all available commands.
+ """
+        if command:
+            # An unknown command name raises BadParameter via __getitem__.
+            f = self[command]
+            ctx = self.help.make_context(command, [], obj=self)
+            click.echo(f.get_help(ctx))
+ else:
+ click.echo("Commands:")
+ command_width = max(map(len, self))
+ text_width = 73 - command_width
+ template = " {:<%d} {}" % command_width
+ for arg0 in sorted(self):
+ f = self[arg0]
+ text = [f.get_short_help_str(limit=text_width)]
+ for i, line in enumerate(text):
+ if i == 0:
+ click.echo(template.format(arg0, line))
+ else:
+ click.echo(template.format("", line))
+
+ @click.command()
+ @click.pass_obj
+ def ls(self):
+        """ Show a detailed list of the available servers.
+
+        Each server is listed by name, along with the following
+        details:
+
+ \b
+ - Docker container in which the server is running
+ - Server mode: CORE, READ_REPLICA or SINGLE
+ - Bolt port
+ - HTTP port
+ - Debug port
+
+ """
+        click.echo("NAME        CONTAINER   MODE           "
+                   "BOLT PORT   HTTP PORT   DEBUG PORT")
+ for spec, machine in self.service.machines.items():
+ if spec is None or machine is None:
+ continue
+ click.echo("{:<12}{:<12}{:<15}{:<12}{:<12}{}".format(
+ spec.fq_name,
+ machine.container.short_id,
+ spec.config.get("dbms.mode", "SINGLE"),
+ spec.bolt_port or "-",
+ spec.http_port or "-",
+ spec.debug_opts.port or "-",
+ ))
+
+ @click.command()
+ @click.argument("machine", required=False)
+ @click.pass_obj
+ def ping(self, machine):
+ """ Ping a server by name to check it is available. If no server name
+ is provided, 'a' is used as a default.
+ """
+
+ def f(m):
+ m.ping(timeout=0)
+
+ if not self._for_each_machine(machine, f):
+ raise BadParameter("Machine {!r} not found".format(machine))
+
+ @click.command()
+ @click.argument("gdb", required=False)
+ @click.pass_obj
+ def rt(self, gdb):
+ """ Display the routing table for a given graph database.
+ """
+ routers = self.service.routers()
+ cx = Connector(ServiceProfile(routers[0].profiles["bolt"], routing=True))
+ if gdb is None:
+ click.echo("Refreshing routing information for the default graph database...")
+ else:
+ click.echo("Refreshing routing information for graph database %r..." % gdb)
+ rt = cx.refresh_routing_table(gdb)
+ ro_profiles, rw_profiles, _ = rt.runners()
+ click.echo("Routers: %s" % " ".join(map(str, cx.get_router_profiles())))
+ click.echo("Readers: %s" % " ".join(map(str, ro_profiles)))
+ click.echo("Writers: %s" % " ".join(map(str, rw_profiles)))
+ cx.close()
+
+ @click.command()
+ @click.argument("machine", required=False)
+ @click.pass_obj
+ def logs(self, machine):
+ """ Display logs for a named server.
+
+ If no server name is provided, 'a' is used as a default.
+ """
+
+ def f(m):
+ click.echo(m.container.logs())
+
+ if not self._for_each_machine(machine, f):
+ raise BadParameter("Machine {!r} not found".format(machine))
+
+ @click.command()
+ @click.argument("time", type=float)
+ @click.argument("machine", required=False)
+ @click.pass_obj
+ def pause(self, time, machine):
+ """ Pause a server for a given number of seconds.
+
+ If no server name is provided, 'a' is used as a default.
+ """
+
+ def f(m):
+ click.echo("Pausing machine {!r} for {}s".format(m.spec.fq_name,
+ time))
+ m.container.pause()
+ sleep(time)
+ m.container.unpause()
+ m.ping(timeout=0)
+
+ if not self._for_each_machine(machine, f):
+ raise BadParameter("Machine {!r} not found".format(machine))
+
+
+class Neo4jClusterConsole(Neo4jConsole):
+
+ @click.command()
+ @click.argument("mode")
+ @click.pass_obj
+ def add(self, mode):
+ """ Add a new server by mode.
+
+ The new server can be added in either "core" or "read-replica" mode.
+        The full set of available MODE values is:
+
+ - c, core
+ - r, rr, replica, read-replica, read_replica
+
+ """
+ if mode in ("c", "core"):
+ self.service.add_core()
+ elif mode in ("r", "rr", "replica", "read-replica", "read_replica"):
+ self.service.add_replica()
+ else:
+            raise BadParameter('Invalid value for "MODE": {!r}; choose from '
+                               '"core" or "read-replica"'.format(mode))
+
+ @click.command()
+ @click.argument("machine")
+ @click.pass_obj
+ def rm(self, machine):
+ """ Remove a server by name or role.
+
+        Servers are identified by their name (e.g. 'a', 'a.fbe340d').
+ """
+ if not self.service.remove(machine):
+ raise BadParameter("Machine {!r} not found".format(machine))
+
+ @click.command()
+ @click.argument("machine")
+ @click.pass_obj
+ def reboot(self, machine):
+ """ Reboot a server by name or role.
+
+        Servers are identified by their name (e.g. 'a', 'a.fbe340d').
+ """
+ if not self.service.reboot(machine):
+ raise BadParameter("Machine {!r} not found".format(machine))
diff --git a/test/vendor/grolt/grolt/__main__.py b/test/vendor/grolt/grolt/__main__.py
new file mode 100644
index 00000000..dc9b0074
--- /dev/null
+++ b/test/vendor/grolt/grolt/__main__.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+# Copyright 2011-2021, Nigel Small
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from logging import CRITICAL, DEBUG, ERROR, INFO, WARNING, Formatter, StreamHandler, getLogger
+from subprocess import call
+import sys
+from sys import stdout
+
+import click
+from click import ParamType, Path
+
+from grolt import make_auth, Neo4jService, Neo4jDirectorySpec, __name__ as root_module_name
+from grolt.compat import shlex_quote
+from grolt.security import install_self_signed_certificate
+
+# The readline import allows for extended input functionality, including
+# up/down arrow navigation. This should not be removed.
+try:
+ import readline
+except ImportError:
+    # readline is not available on Windows
+ # noinspection PyUnresolvedReferences
+ from pyreadline import Readline
+ readline = Readline()
+
+
+class AuthParamType(ParamType):
+
+ name = "auth"
+
+ def __init__(self, default_user=None, default_password=None):
+ self.default_user = default_user
+ self.default_password = default_password
+
+ def convert(self, value, param, ctx):
+ try:
+ return make_auth(value, self.default_user, self.default_password)
+ except ValueError as error:
+ self.fail(error.args[0], param, ctx)
+
+ def __repr__(self):
+ return 'USER:PASSWORD'
+
+
+class VolumeMount(object):
+ def __init__(self, source, destination):
+ self.source = source
+ self.destination = destination
+
+
+class VolumeMountParamType(click.ParamType):
+
+ name = "vol"
+
+ source_spec = Path(exists=True, dir_okay=True, readable=True, writable=True, allow_dash=False)
+ destination_spec = Path(exists=False, allow_dash=False)
+
+ def __init__(self):
+ pass
+
+    def convert(self, value, param, ctx):
+        try:
+            source, destination = value.split(":")
+        except ValueError:
+            self.fail("%r is not a valid SOURCE:DESTINATION mount" % value, param, ctx)
+        return VolumeMount(
+            self.source_spec.convert(source.strip(), None, None),
+            self.destination_spec.convert(destination.strip(), None, None)
+        )
+
+ def __repr__(self):
+ return 'SOURCE:DESTINATION'
+
+
+class ConfigParamType(click.ParamType):
+
+ name = "NAME=VALUE"
+
+ def __repr__(self):
+ return 'NAME=VALUE'
+
+
+class ColourFormatter(Formatter):
+
+ def format(self, record):
+ s = super(ColourFormatter, self).format(record)
+ bits = s.split(" ", 1)
+ bits[0] = click.style(bits[0], fg="blue")
+ if record.levelno == CRITICAL:
+ bits[1] = click.style(bits[1], fg="bright_red")
+ elif record.levelno == ERROR:
+ bits[1] = click.style(bits[1], fg="bright_yellow")
+ elif record.levelno == WARNING:
+ bits[1] = click.style(bits[1], fg="yellow")
+ elif record.levelno == INFO:
+ pass
+ elif record.levelno == DEBUG:
+ bits[1] = click.style(bits[1], fg="cyan")
+ return " ".join(bits)
+
+
+class Watcher(object):
+ """ Log watcher for monitoring driver and protocol activity.
+ """
+
+ handlers = {}
+
+ def __init__(self, logger_name):
+ super(Watcher, self).__init__()
+ self.logger_name = logger_name
+ self.logger = getLogger(self.logger_name)
+ self.formatter = ColourFormatter("%(asctime)s %(message)s",
+ "%H:%M:%S")
+
+ def watch(self, level=INFO, out=stdout):
+ self.stop()
+ handler = StreamHandler(out)
+ handler.setFormatter(self.formatter)
+ self.handlers[self.logger_name] = handler
+ self.logger.addHandler(handler)
+ self.logger.setLevel(level)
+
+ def stop(self):
+ try:
+ self.logger.removeHandler(self.handlers[self.logger_name])
+ except KeyError:
+ pass
+
+
+def watch_log(ctx, param, value):
+ watcher = Watcher(root_module_name)
+ watcher.watch(DEBUG if value >= 1 else INFO)
+ return watcher
+
+
+@click.command(context_settings={"ignore_unknown_options": True}, help="""\
+Run a Neo4j cluster or standalone server in one or more local Docker
+containers.
+
+If an additional COMMAND is supplied, this will be executed after startup,
+with a shutdown occurring immediately afterwards. If no COMMAND is supplied,
+an interactive command line console will be launched which allows direct
+control of the service. This console can be shut down with Ctrl+C, Ctrl+D or
+by entering the command 'exit'.
+
+A couple of environment variables will also be made available to any COMMAND
+passed. These are:
+
+\b
+- BOLT_SERVER_ADDR
+- NEO4J_AUTH
+
+""")
+@click.option("-a", "--auth", type=AuthParamType(), envvar="NEO4J_AUTH",
+ help="Credentials with which to bootstrap the service. These "
+ "must be specified as a 'user:password' pair and may "
+ "alternatively be supplied via the NEO4J_AUTH environment "
+ "variable. These credentials will also be exported to any "
+ "COMMAND executed during the service run.")
+@click.option("-B", "--bolt-port", type=int,
+ help="A port number (standalone) or base port number (cluster) "
+ "for Bolt traffic.")
+@click.option("-c", "--n-cores", type=int,
+ help="If specified, a cluster with this many cores will be "
+ "created. If omitted, a standalone service will be created "
+ "instead. See also -r for specifying the number of read "
+ "replicas.")
+@click.option("-C", "--config", type=ConfigParamType(), multiple=True,
+ help="Pass a configuration value into neo4j.conf. This can be "
+ "used multiple times.")
+@click.option("-d", "--directory", multiple=True, type=VolumeMountParamType(),
+ help="Share a local directory into the neo4j docker container(s) "
+ "(mount a volume in docker parlance). "
+ "N.b. the directory is shared to ALL docker containers.")
+@click.option("-D", "--debug-port", type=int,
+ help="The port number (standalone) or base port number (cluster) "
+ "for java remote debugging.")
+@click.option("-e", "--env", type=ConfigParamType(), multiple=True,
+ help="Pass an env value into neo4j docker containers. This can be "
+ "used multiple times.")
+@click.option("-E", "--debug-suspend", is_flag=True,
+ help="The first Neo4j server process (machine a) should hang "
+ "until a connection is made by a remote java debugger. This "
+ "option is only valid if a debug port is specified with -D.")
+# -h / --help is automatically provided by click
+@click.option("-H", "--http-port", type=int,
+ help="A port number (standalone) or base port number (cluster) "
+ "for HTTP traffic.")
+@click.option("--https-port", type=int,
+ help="A port number (standalone) or base port number (cluster) "
+ "for HTTPS traffic.")
+@click.option("-i", "--image",
+ help="The Docker image tag to use for building containers. The "
+ "repository name can be included before the colon, but will "
+ "default to 'neo4j' if omitted. Note that a Neo4j "
+ "Enterprise Edition image is required for building "
+ "clusters. File URLs can also be passed, which can "
+ "allow for loading images from local tar files.")
+@click.option("-I", "--import-dir", type=Path(exists=True, dir_okay=True,
+ writable=True),
+ help="Share a local directory for use by server import.")
+@click.option("-L", "--logs-dir", type=Path(exists=True, dir_okay=True,
+ writable=True),
+ help="Share a local directory for use by server logs. A "
+ "subdirectory will be created for each machine.")
+@click.option("-n", "--name",
+ help="A Docker network name to which all servers will be "
+ "attached. If omitted, an auto-generated name will be "
+ "used.")
+@click.option("-N", "--neo4j-source-dir", type=Path(exists=True, dir_okay=True),
+ help="Path to neo4j source repo. Mounts and uses the "
+ "packaged neo4j jars and binaries from there.")
+@click.option("-P", "--plugins-dir", type=Path(exists=True, dir_okay=True,
+ writable=True),
+ help="Share a local directory for use by server plugins.")
+@click.option("-r", "--n-replicas", type=int,
+ help="The number of read replicas to include within the "
+ "cluster. This option will only take effect if -c is also "
+ "used.")
+@click.option("-R", "--server-side-routing", is_flag=True,
+ help="Enable server-side routing.")
+@click.option("-S", "--certificates-dir", type=Path(exists=True, dir_okay=True,
+ writable=True),
+ help="Share a local directory for use by server certificates.")
+@click.option("-u", "--user",
+ help="User name or ID as whom to run the Docker container. "
+ "For the current user, use `-u $(whoami)`.")
+@click.option("-v", "--verbose", count=True, callback=watch_log,
+ expose_value=False, is_eager=True,
+ help="Show more detail about the startup and shutdown process.")
+@click.option("-Z", "--self-signed-certificate", is_flag=True,
+ help="Generate and use a self-signed certificate.")
+@click.argument("command", nargs=-1, type=click.UNPROCESSED)
+def grolt(
+ command,
+ name,
+ image,
+ auth,
+ user,
+ n_cores,
+ n_replicas,
+ bolt_port,
+ http_port,
+ https_port,
+ debug_port,
+ env,
+ debug_suspend,
+ import_dir,
+ logs_dir,
+ plugins_dir,
+ certificates_dir,
+ neo4j_source_dir,
+ directory,
+ config,
+ server_side_routing,
+ self_signed_certificate,
+):
+ try:
+ if self_signed_certificate:
+ if certificates_dir is not None:
+ click.echo("Incompatible certificate options specified", err=True)
+                sys.exit(1)
+ certificates_dir = install_self_signed_certificate(image)
+
+ dir_spec = Neo4jDirectorySpec(
+ import_dir=import_dir,
+ logs_dir=logs_dir,
+ plugins_dir=plugins_dir,
+ certificates_dir=certificates_dir,
+ shared_dirs=directory,
+ neo4j_source_dir=neo4j_source_dir,
+ )
+ config_dict = dict(item.partition("=")[::2] for item in config)
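+        # partition("=")[::2] keeps the name and value and drops the "=",
+        # e.g. "dbms.logs.query.enabled=true" yields ("dbms.logs.query.enabled", "true").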
+ if server_side_routing:
+ config_dict["dbms.routing.enabled"] = "true"
+ env_dict = dict(item.partition("=")[::2] for item in env)
+ with Neo4jService(
+ name,
+ image,
+ auth,
+ user,
+ n_cores,
+ n_replicas,
+ bolt_port,
+ http_port,
+ https_port,
+ debug_port,
+ debug_suspend,
+ dir_spec,
+ config_dict,
+ env_dict
+ ) as neo4j:
+ if command:
+ call(" ".join(map(shlex_quote, command)), shell=True,
+ env=neo4j.env())
+ else:
+ neo4j.run_console()
+ except KeyboardInterrupt:
+ sys.exit(130)
+ except Exception as error:
+ message = " ".join(map(str, error.args))
+ if hasattr(error, 'explanation'):
+ message += "\n" + error.explanation
+ click.echo(message, err=True)
+ sys.exit(1)
+
+
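+# Illustrative invocations (using the options defined above):
+#   grolt -v                   standalone server with an interactive console
+#   grolt -c 3 -r 2            cluster with three cores and two read replicas
+#   grolt -i 4.4-enterprise    standalone server from the neo4j:4.4-enterprise image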
+if __name__ == "__main__":
+ grolt()
diff --git a/test/vendor/grolt/grolt/compat.py b/test/vendor/grolt/grolt/compat.py
new file mode 100644
index 00000000..94a14ef9
--- /dev/null
+++ b/test/vendor/grolt/grolt/compat.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+
+import re
+
+
+_find_unsafe = re.compile(r'[^\w@%+=:,./-]').search
+
+
+def shlex_quote(s):
+ """Return a shell-escaped version of the string *s*."""
+ if not s:
+ return "''"
+ if _find_unsafe(s) is None:
+ return s
+
+ # use single quotes, and put single quotes into double quotes
+ # the string $'b is then quoted as '$'"'"'b'
+ return "'" + s.replace("'", "'\"'\"'") + "'"
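+# For example (illustrative):
+#   shlex_quote("report.txt")  ->  report.txt       (nothing to escape)
+#   shlex_quote("it's")        ->  'it'"'"'s'       (single quote wrapped in double quotes)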
diff --git a/test/vendor/grolt/grolt/images.py b/test/vendor/grolt/grolt/images.py
new file mode 100644
index 00000000..fc4ff1e0
--- /dev/null
+++ b/test/vendor/grolt/grolt/images.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+# Copyright 2011-2021, Nigel Small
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from docker import DockerClient
+
+
+docker = DockerClient.from_env(version="auto")
+
+
+def is_legacy_image(image):
+ return image.startswith("3") or image.startswith("neo4j:3")
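+# For example, is_legacy_image("3.4.6") and is_legacy_image("neo4j:3.5") are
+# True, while is_legacy_image("neo4j:4.4") is False.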
+
+
+def resolve_image(image):
+ """ Resolve an informal image tag into a full Docker image tag. Any tag
+ available on Docker Hub for Neo4j can be used, and if no 'neo4j:' prefix
+ exists, this will be added automatically. The default edition is
+ Community, unless a cluster is being created in which case Enterprise
+ edition is selected instead. Explicit selection of Enterprise edition can
+ be made by adding an '-enterprise' suffix to the image tag.
+
+ If a 'file:' URI is passed in here instead of an image tag, the Docker
+ image will be loaded from that file instead.
+
+ Examples of valid tags:
+ - 3.4.6
+ - neo4j:3.4.6
+ - latest
+ - file:/home/me/image.tar
+
+ """
+ if image.startswith("file:"):
+ return load_image_from_file(image[5:])
+ elif ":" in image:
+ return image
+ else:
+ return "neo4j:" + image
+
+
+def load_image_from_file(name):
+ with open(name, "rb") as f:
+ images = docker.images.load(f.read())
+ image = images[0]
+ return image.tags[0]
diff --git a/test/vendor/grolt/grolt/security/__init__.py b/test/vendor/grolt/grolt/security/__init__.py
new file mode 100644
index 00000000..f253743d
--- /dev/null
+++ b/test/vendor/grolt/grolt/security/__init__.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+# Copyright 2011-2021, Nigel Small
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from __future__ import absolute_import
+
+from collections import namedtuple
+from os import chmod, path
+from tempfile import mkdtemp
+from uuid import uuid4
+
+from grolt.images import is_legacy_image, resolve_image
+from grolt.security._cryptography import (make_self_signed_certificate,
+ install_certificate,
+ install_private_key)
+
+
+Auth = namedtuple("Auth", ["user", "password"])
+
+
+def make_auth(value=None, default_user=None, default_password=None):
+ try:
+ user, _, password = str(value or "").partition(":")
+ except AttributeError:
+ raise ValueError("Invalid auth string {!r}".format(value))
+ else:
+ return Auth(user or default_user or "neo4j",
+ password or default_password or uuid4().hex)
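+# For example, make_auth("neo4j:secret") returns Auth(user='neo4j',
+# password='secret'), while make_auth() falls back to user 'neo4j' with a
+# random hex password.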
+
+
+def install_self_signed_certificate(image):
+ """ Install a self-signed certificate for the given Docker image
+ and return the installation directory.
+ """
+ if is_legacy_image(resolve_image(image)):
+ return None # Automatically available in 3.x
+ cert, key = make_self_signed_certificate()
+ certificates_dir = mkdtemp()
+ chmod(certificates_dir, 0o755)
+ subdirectories = [path.join(certificates_dir, subdir)
+ for subdir in ["bolt", "https"]]
+ install_private_key(key, "private.key", *subdirectories)
+ install_certificate(cert, "public.crt", *subdirectories)
+ return certificates_dir
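+# The resulting directory contains bolt/ and https/ subdirectories, each
+# holding a private.key and public.crt pair; the -Z/--self-signed-certificate
+# option passes it to Neo4jDirectorySpec as the certificates directory.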
diff --git a/test/vendor/grolt/grolt/security/_cryptography.py b/test/vendor/grolt/grolt/security/_cryptography.py
new file mode 100644
index 00000000..cf571725
--- /dev/null
+++ b/test/vendor/grolt/grolt/security/_cryptography.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+# Copyright 2011-2021, Nigel Small
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from __future__ import absolute_import
+
+from datetime import datetime, timedelta
+from logging import getLogger
+from os import makedirs, path
+from socket import gethostname
+
+from cryptography import x509
+from cryptography.x509.oid import NameOID
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import rsa
+from six import u
+
+
+log = getLogger("grolt.security")
+
+
+def make_self_signed_certificate():
+
+ # create a private key
+ log.debug("Generating private key")
+ key = rsa.generate_private_key(
+ public_exponent=65537,
+ key_size=2048,
+ backend=default_backend()
+ )
+
+ # Create a self-signed cert.
+ log.debug("Generating self-signed certificate")
+ subject = issuer = x509.Name([
+ x509.NameAttribute(NameOID.COUNTRY_NAME, u"GB"),
+ x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, u"Kent"),
+ x509.NameAttribute(NameOID.LOCALITY_NAME, u"Canterbury"),
+ x509.NameAttribute(NameOID.ORGANIZATION_NAME, u"Example"),
+ x509.NameAttribute(NameOID.COMMON_NAME, u(gethostname())),
+ ])
+ cert = x509.CertificateBuilder().subject_name(
+ subject
+ ).issuer_name(
+ issuer
+ ).public_key(
+ key.public_key()
+ ).serial_number(
+ x509.random_serial_number()
+ ).not_valid_before(
+ datetime.utcnow()
+ ).not_valid_after(
+ datetime.utcnow() + timedelta(days=7)
+ ).add_extension(
+ x509.SubjectAlternativeName([x509.DNSName(u"localhost")]),
+ critical=False,
+ ).sign(key, hashes.SHA256(), default_backend())
+
+ return cert, key
+
+
+def install_certificate(cert, name, *cert_dirs):
+ for cert_dir in cert_dirs:
+ try:
+ makedirs(cert_dir)
+ except OSError:
+ pass
+ cert_file = path.join(cert_dir, name)
+ log.debug("Installing certificate to %r", cert_file)
+ with open(cert_file, "wb") as f:
+ f.write(cert.public_bytes(serialization.Encoding.PEM))
+
+
+def install_private_key(key, name, *key_dirs):
+ for key_dir in key_dirs:
+ try:
+ makedirs(key_dir)
+ except OSError:
+ pass
+ key_file = path.join(key_dir, name)
+ log.debug("Installing private key to %r", key_file)
+ with open(key_file, "wb") as f:
+ f.write(key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption(),
+ ))
diff --git a/test/vendor/grolt/requirements.txt b/test/vendor/grolt/requirements.txt
new file mode 100644
index 00000000..379221e5
--- /dev/null
+++ b/test/vendor/grolt/requirements.txt
@@ -0,0 +1,11 @@
+certifi
+cryptography~=2.0; python_version<'3.6'
+cryptography~=3.0; python_version>='3.6'
+click<8.0; python_version<'3.6'
+click; python_version>='3.6'
+docker<5.0; python_version<'3.6'
+docker; python_version>='3.6'
+monotonic
+py2neo>=2021.1.4
+pyreadline>=2.1; platform_system=='Windows'
+six
diff --git a/test/vendor/grolt/setup.py b/test/vendor/grolt/setup.py
new file mode 100644
index 00000000..045d1752
--- /dev/null
+++ b/test/vendor/grolt/setup.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+# Copyright 2011-2021, Nigel Small
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from setuptools import setup, find_packages
+
+
+packages = find_packages()
+package_metadata = {
+ "name": "grolt",
+ "version": "1.0.7",
+ "description": "Docker-based development and testing framework for Neo4j",
+ "long_description": "Please see https://github.com/technige/grolt "
+ "for details.",
+ "author": "Nigel Small",
+ "author_email": "technige@py2neo.org",
+ "entry_points": {
+ "console_scripts": [
+ "grolt = grolt.__main__:grolt",
+ ],
+ },
+ "packages": packages,
+ "install_requires": [
+ "certifi",
+ "cryptography~=2.0; python_version<'3.6'",
+ "cryptography~=3.0; python_version>='3.6'",
+ "click<8.0; python_version<'3.6'",
+ "click; python_version>='3.6'",
+ "docker<5.0; python_version<'3.6'",
+ "docker; python_version>='3.6'",
+ "monotonic",
+ "py2neo>=2021.1.4",
+ "pyreadline>=2.1; platform_system=='Windows'",
+ "six",
+ ],
+ "license": "Apache License, Version 2.0",
+ "classifiers": [
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Topic :: Database",
+ "Topic :: Software Development",
+ ],
+}
+
+setup(**package_metadata)
diff --git a/test/vendor/grolt/tox.ini b/test/vendor/grolt/tox.ini
new file mode 100644
index 00000000..b0cb54e1
--- /dev/null
+++ b/test/vendor/grolt/tox.ini
@@ -0,0 +1,19 @@
+[tox]
+envlist =
+ py35
+ py36
+ py37
+ # py38
+
+[testenv]
+passenv =
+ NEO4J_SERVER_PACKAGE
+ NEO4J_USER
+ NEO4J_PASSWORD
+ JAVA_HOME
+commands =
+ python setup.py develop
+ pip install --quiet --upgrade -r {toxinidir}/test/requirements.txt
+ coverage erase
+ coverage run -m pytest -v {posargs} test
+ coverage report