Commit 0cbac78: Initial commit

ericof committed May 7, 2024 (0 parents)
Showing 139 changed files with 4,728 additions and 0 deletions.

36 changes: 36 additions & 0 deletions .editorconfig
@@ -0,0 +1,36 @@
# EditorConfig configuration file; for more details see:
# https://EditorConfig.org
# EditorConfig is a convention specification that can be interpreted
# by multiple editors to enforce common coding conventions for specific
# file types

# top-most EditorConfig file:
# EditorConfig files in the home directory or higher up the tree will be ignored.
root = true


[*] # For All Files
# Unix-style newlines with a newline ending every file
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
# Set default charset
charset = utf-8
# Indent style default
indent_style = space
# Max line length: hard line wrapping should be disabled
max_line_length = off

[*.{py,cfg,ini}]
# 4 space indentation
indent_size = 4

[*.{html,dtml,pt,zpt,xml,zcml,js,json,less,css,yml,yaml}]
# 2 space indentation
indent_size = 2

[{Makefile,.gitmodules}]
# Tab indentation (no size specified; most editors display tabs as 4 spaces)
indent_style = tab
indent_size = unset
tab_width = unset
2 changes: 2 additions & 0 deletions .github/CODEOWNERS
@@ -0,0 +1,2 @@
/backend_addon/ @ericof
/frontend_addon/ @sneridagh
44 changes: 44 additions & 0 deletions .github/workflows/backend_addon.yml
@@ -0,0 +1,44 @@
name: Plone Backend Add-on CI
on:
  push:
    paths:
      - "backend_addon/**"
      - ".github/workflows/backend_addon.yml"
  workflow_dispatch:

jobs:

  generation:

    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "3.10"
          - "3.11"
          - "3.12"

    steps:
      # git checkout
      - name: Checkout codebase
        uses: actions/checkout@v4

      # python setup
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'

      # python install
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
      # Test
      - name: Run tests
        run: |
          cd backend_addon
          python -m pytest tests
168 changes: 168 additions & 0 deletions .github/workflows/frontend_addon.yml
@@ -0,0 +1,168 @@
name: Plone Frontend Add-on CI
on:
  push:
    paths:
      - "frontend_addon/**"
      - ".github/workflows/frontend_addon.yml"
  workflow_dispatch:

env:
  NODE_VERSION: 20.x
  PYTHON_VERSION: "3.10"

jobs:

  generation:

    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "3.10"
          - "3.11"
          - "3.12"

    steps:
      # git checkout
      - name: Checkout codebase
        uses: actions/checkout@v4

      # python setup
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'

      # python install
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
      # Test
      - name: Run tests
        run: |
          cd frontend_addon
          python -m pytest tests
  functional:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout codebase
        uses: actions/checkout@v4

      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: '${{ env.PYTHON_VERSION }}'
          cache: 'pip'

      - name: Install dependencies
        run: |
          pip install -r requirements.txt
      - name: Generate
        working-directory: frontend_addon
        run: |
          make generate
      - name: Use Node.js ${{ env.NODE_VERSION }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}

      - uses: pnpm/action-setup@v3
        name: Install pnpm
        with:
          version: 8
          # We don't want to install until later,
          # when the cache and Cypress are in place
          run_install: false

      - name: Get pnpm store directory
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ env.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-
      - name: Cache Cypress Binary
        id: cache-cypress-binary
        uses: actions/cache@v4
        with:
          path: ~/.cache/Cypress
          key: binary-${{ env.NODE_VERSION }}-${{ hashFiles('pnpm-lock.yaml') }}

      - name: Install generated package
        working-directory: frontend_addon/volto-addon
        run: |
          make install
      - name: Run unit tests
        working-directory: frontend_addon/volto-addon
        run: |
          make test-ci
      - name: Run i18n tests
        working-directory: frontend_addon/volto-addon
        run: |
          make i18n
      - name: Run linting
        working-directory: frontend_addon/volto-addon
        run: |
          make lint
      - name: Run formatting
        working-directory: frontend_addon/volto-addon
        run: |
          make format
      - name: Run Storybook
        working-directory: frontend_addon/volto-addon
        run: |
          make storybook-build
      - name: Start Servers
        uses: JarvusInnovations/background-action@v1
        with:
          working-directory: frontend_addon/volto-addon
          run: |
            make start-test-acceptance-server-ci &
            make start-test-acceptance-frontend &
            # your step-level and job-level environment variables are available to your commands as-is
            # npm install will count towards the wait-for timeout
            # whenever possible, move unrelated scripts to a different step
            # to background multiple processes: add & to the end of the command

          wait-on: |
            http-get://localhost:55001/plone
            http://localhost:3000
          # IMPORTANT: to use environment variables in wait-on, you must use this form: ${{ env.VAR }}
          # See wait-on section below for all resource types and prefixes

          tail: true # true = stderr,stdout
          # This will allow you to monitor the progress live

          log-output-resume: stderr
          # Eliminates previously output stderr log entries from post-run output

          wait-for: 10m

          log-output: stderr,stdout # same as true

          log-output-if: failure

      - name: Run acceptance tests
        working-directory: frontend_addon/volto-addon
        run: |
          make test-acceptance-headless
11 changes: 11 additions & 0 deletions .gitignore
@@ -0,0 +1,11 @@
bin/
include/
lib/
lib64/
pip-selfcheck.json
.vscode
collective.plonedistribution
.pytest_cache
.reports/*.csv
.reports/*.json
*/__pycache__/
Empty file added .reports/.gitkeep
33 changes: 33 additions & 0 deletions .scripts/report_context.py
@@ -0,0 +1,33 @@
import json
from datetime import date
from pathlib import Path

from git import Repo

cwd = Path().cwd()
reports = cwd / ".reports"

repo = Repo(cwd)
last_commit = repo.head.commit

report_filename = f"{date.today()}-{last_commit.hexsha[:7]}-report.csv"

folders = ["backend_addon", "frontend_addon"]
ignore = ["__prompts__", "_copy_without_render", "_extensions"]
data = []

for folder in folders:
    file_ = cwd / folder / "cookiecutter.json"
    questions = json.loads(file_.read_text())
    items = [
        (folder, key, value) for key, value in questions.items() if key not in ignore
    ]
    data.extend(items)

report_path = reports / report_filename
with open(report_path, "w") as fout:
    fout.write("template\tkey\tvalue\n")
    for addon, key, value in data:
        fout.write(f'"{addon}"\t"{key}"\t"{value}"\n')

print(f"Report available at {report_path}")
79 changes: 79 additions & 0 deletions .scripts/report_keys_usage.py
@@ -0,0 +1,79 @@
import json
import re
from collections import defaultdict
from datetime import date
from pathlib import Path

from binaryornot.check import is_binary
from git import Repo

PATTERN = "{{ ?(cookiecutter)[.]([a-zA-Z0-9-_]*)"
RE_OBJ = re.compile(PATTERN)

cwd = Path().cwd()
reports = cwd / ".reports"

repo = Repo(cwd)
last_commit = repo.head.commit

report_filename = f"{date.today()}-{last_commit.hexsha[:7]}-usage.json"

folders = ["backend_addon", "frontend_addon"]
ignore = [
    "__prompts__",
]


def as_sorted_list(value: set) -> list:
    """Convert a set to a list and sort it."""
    value = list(value)
    return sorted(value)


def find_and_add_keys(used_keys: set, data: str) -> set:
    matches = RE_OBJ.findall(data) or []
    for match in matches:
        used_keys.add(match[1])
    return used_keys


def valid_key(key: str) -> bool:
    """Check if we will check for this key."""
    return all(
        [
            key not in ignore,
            key.startswith("__") or not key.startswith("_"),
        ]
    )


keys = defaultdict(dict)
for folder in folders:
    base_path = cwd / folder
    file_ = base_path / "cookiecutter.json"
    template_folder = base_path / "{{ cookiecutter.__folder_name }}"
    raw_context = file_.read_text()
    used_keys = find_and_add_keys({"__folder_name"}, raw_context)
    questions = json.loads(raw_context)
    items = {key for key in questions.keys() if valid_key(key)}
    all_files = template_folder.glob("**/*")
    # __folder_name was already added above, when scanning cookiecutter.json
    for filepath in all_files:
        data = filepath.name
        is_file = filepath.is_file()
        if is_file and is_binary(f"{filepath}"):
            continue
        if is_file:
            data = f"{data} {filepath.read_text()}"
        used_keys = find_and_add_keys(used_keys, data)
    keys[folder]["all"] = as_sorted_list(items)
    keys[folder]["used"] = as_sorted_list(used_keys & items)
    keys[folder]["not_used"] = as_sorted_list(items.difference(used_keys))
    keys[folder]["missing"] = as_sorted_list(used_keys.difference(items))


report_path = reports / report_filename
with open(report_path, "w") as fout:
    json.dump(keys, fout, indent=2)

print(f"Report available at {report_path}")