Add minifier for the web-page output #44

Open · wants to merge 3 commits into master
4 changes: 2 additions & 2 deletions .github/workflows/docker-image.yml
@@ -18,8 +18,8 @@ jobs:
        run: dnf -y update fedora-gpg-keys

      - name: Install git and Python libraries
        run: dnf -y install git-core python3-yaml python3-jinja2 python3-koji python3-pytest python3-flake8
        run: dnf -y install git-core python3-yaml python3-htmlmin python3-jinja2 python3-koji python3-pytest python3-flake8

      - name: Clean up
        run: dnf clean all

2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,7 +1,7 @@
from registry.fedoraproject.org/fedora:37

run dnf -y update fedora-gpg-keys && \
    dnf -y install git python3-jinja2 python3-koji python3-yaml && \
    dnf -y install git python3-pytest python3-htmlmin python3-pytest-cov python3-jinja2 python3-koji python3-yaml && \
    dnf clean all

workdir /workspace
19 changes: 14 additions & 5 deletions feedback_pipeline.py
@@ -1,6 +1,6 @@
#!/usr/bin/python3

import argparse, yaml, tempfile, os, subprocess, json, jinja2, datetime, copy, re, dnf, pprint, urllib.request, sys, koji
import argparse, yaml, tempfile, os, subprocess, json, jinja2, datetime, copy, re, dnf, pprint, urllib.request, sys, koji, htmlmin
import concurrent.futures
import rpm_showme as showme
from functools import lru_cache
@@ -150,7 +150,7 @@ def datetime_now_string():
    return datetime.datetime.now().strftime("%m/%d/%Y, %H:%M:%S")


def load_settings():
def load_settings(argv=None):
    settings = {}

    parser = argparse.ArgumentParser()
@@ -159,11 +159,13 @@ def load_settings():
    parser.add_argument("--use-cache", dest="use_cache", action='store_true', help="Use local data instead of pulling Content Resolver. Saves a lot of time! Needs a 'cache_data.json' file at the same location as the script is at.")
    parser.add_argument("--dev-buildroot", dest="dev_buildroot", action='store_true', help="Buildroot grows pretty quickly. Use a fake one for development.")
    parser.add_argument("--dnf-cache-dir", dest="dnf_cache_dir_override", help="Override the dnf cache_dir.")
    args = parser.parse_args()
    parser.add_argument("--htmlmin", dest="htmlmin", action='store_true', help="Run html minimiser while producing the pages.")
    args = parser.parse_args(argv)

    settings["configs"] = args.configs
    settings["output"] = args.output
    settings["use_cache"] = args.use_cache
    settings["htmlmin"] = args.htmlmin
    settings["dev_buildroot"] = args.dev_buildroot
    settings["dnf_cache_dir_override"] = args.dnf_cache_dir_override

@@ -5721,6 +5723,13 @@ def _generate_html_page(template_name, template_data, page_name, settings):

    page = template.render(**template_data)

    if settings["htmlmin"]:
        try:
            page = htmlmin.minify(page, remove_empty_space=True)
        except Exception as e:
            log("    Minification failed for {} ({})".format(page_name, e))


    filename = ("{page_name}.html".format(
        page_name=page_name.replace(":", "--")
    ))
@@ -7245,7 +7254,7 @@ def generate_historic_data(query):
###############################################################################


def main():
def main(argv=None):

    # -------------------------------------------------
    # Stage 1: Data collection and analysis using DNF
@@ -7254,7 +7263,7 @@ def main():
    # measuring time of execution
    time_started = datetime_now_string()

    settings = load_settings()
    settings = load_settings(argv)

    settings["global_refresh_time_started"] = datetime.datetime.now().strftime("%-d %B %Y %H:%M UTC")

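For reference, the new `--htmlmin` option feeds each rendered page through `htmlmin.minify()` with `remove_empty_space=True`, which is why `python3-htmlmin` is added to the CI workflow and the Dockerfile above. A minimal standalone sketch of that call (the sample HTML string is made up for illustration and is not part of the PR):

```python
# Standalone sketch of the minification step added in _generate_html_page().
# Only htmlmin.minify() is taken from the diff; the sample page is invented.
import htmlmin

rendered_page = """
<html>
  <body>
    <p>   Hello,   Content Resolver!   </p>
  </body>
</html>
"""

# remove_empty_space=True collapses the whitespace between tags,
# which is what shrinks the generated pages.
minified = htmlmin.minify(rendered_page, remove_empty_space=True)
print(minified)
```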
73 changes: 73 additions & 0 deletions test_configs/base-test.yaml
@@ -0,0 +1,73 @@
---
# This configuration file defines an "Environment" in Feedback Pipeline.
# https://tiny.distro.builders
#
# Environments influence what a workload looks like when installed.
# That's achieved by including specific packages — like coreutils-single — that
# influence the result.
# Environments can also act as base images when monitoring container sizes.

document: feedback-pipeline-environment
version: 1
data:
    # id is the filename — that automatically prevents collisions for free!


    ### MANDATORY FIELDS ###

    # Name is an identifier for humans
    #
    # (mandatory field)
    name: Test F34 Environment

    # A short description, perhaps hinting the purpose
    #
    # (mandatory field)
    description: A base environment on top of which all test workloads are analyzed.

    # Who maintains it? This is just a freeform string
    # for humans to read. In Fedora, a FAS nick is recommended.
    #
    # (mandatory field)
    maintainer: asamalik

    # Different instances of the environment, one per repository.
    #
    # (mandatory field)
    repositories:
    - repo-test

    # Packages defining this environment.
    # This list includes packages for all
    # architectures — that's the one to use by default.
    #
    # (mandatory field)
    packages:
    - bash

    # Labels connect things together.
    # Workloads get installed in environments with the same label.
    # They also get included in views with the same label.
    #
    # (mandatory field)
    labels:
    - test

    ### OPTIONAL FIELDS ###

    # Architecture-specific packages.
    #
    # (optional field)
    #arch_packages:
    # x86_64:
    # - arch-specific-package

    # Extra installation options.
    # The following are now supported:
    # - "include-docs" - include documentation packages
    # - "include-weak-deps" - automatically pull in "recommends" weak dependencies
    #
    # (optional field)
    #options:
    #- option

68 changes: 68 additions & 0 deletions test_configs/bash-test.yaml
@@ -0,0 +1,68 @@
---
# This configuration file defines a "Workload" in Feedback Pipeline.
# https://tiny.distro.builders
#
# A workload is a set of packages with a purpose that someone cares about.
# They might want to monitor it for the install size, dependencies that get
# pulled in, etc.

document: feedback-pipeline-workload
version: 1
data:
    # id is the filename — that automatically prevents collisions for free!


    ### MANDATORY FIELDS ###

    # Name is an identifier for humans
    #
    # (mandatory field)
    name: Bash

    # A short description, perhaps hinting the purpose
    #
    # (mandatory field)
    description: Bash

    # Who maintains it? This is just a freeform string
    # for humans to read. In Fedora, a FAS nick is recommended.
    #
    # (mandatory field)
    maintainer: asamalik

    # Packages defining this workload.
    # This list includes packages for all
    # architectures — that's the one to use by default.
    #
    # (mandatory field)
    packages:
    - bash

    # Labels connect things together.
    # Workloads get installed in environments with the same label.
    # They also get included in views with the same label.
    #
    # (mandatory field)
    labels:
    - test
    options:
    - strict


    ### OPTIONAL FIELDS ###

    # Architecture-specific packages.
    #
    # (optional field)
    #arch_packages:
    # x86_64:
    # - arch-specific-package

    # Extra installation options.
    # The following are now supported:
    # - "include-docs" - include documentation packages
    # - "include-weak-deps" - automatically pull in "recommends" weak dependencies
    #
    # (optional field)
    #options:
    #- option
13 changes: 13 additions & 0 deletions test_configs/view-test.yaml
@@ -0,0 +1,13 @@
---
document: feedback-pipeline-compose-view
version: 1
data:
    name: Test Package Set
    description: Test package set based on F34
    maintainer: bakery
    labels:
    - test
    repository: repo-test
    buildroot_strategy: dep_tracker
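The three test configs are tied together by the shared `test` label and the `repo-test` repository: the `bash-test` workload is installed on top of the `base-test` environment, and both feed into the `view-test` view. A throwaway sketch for eyeballing that relationship locally (not part of the PR; it only assumes PyYAML, which the project already requires, and that it is run from the repository root):

```python
# Throwaway check: load the three new test configs and confirm they share a label.
import yaml

docs = {}
for name in ("base-test", "bash-test", "view-test"):
    with open(f"test_configs/{name}.yaml") as f:
        docs[name] = yaml.safe_load(f)

# Every document carries its labels under data -> labels.
labels = [set(doc["data"]["labels"]) for doc in docs.values()]
assert set.intersection(*labels) == {"test"}

print({name: doc["document"] for name, doc in docs.items()})
```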


68 changes: 66 additions & 2 deletions test_feedback_pipeline.py
@@ -2,5 +2,69 @@

import feedback_pipeline

def test_build_completion():
    assert 1 == 1
import json
import os
import pytest
import tempfile

from shutil import rmtree

@pytest.fixture(scope="module")
def feedback_pipeline_output():
    with tempfile.TemporaryDirectory() as tmp:
        os.mkdir(f"{tmp}/history")
        feedback_pipeline.main([
            "--dev-buildroot", "--htmlmin",
            "--dnf-cache-dir", "/tmp/test_cr",
            "test_configs", tmp])
        yield tmp


def test_bash_test_repo_workload(feedback_pipeline_output):
    expected_pkg_env_ids = set([
        'tzdata-2021a-1.fc34.noarch',
        'fedora-gpg-keys-34-1.noarch',
        'fedora-release-common-34-1.noarch',
        'glibc-minimal-langpack-2.33-5.fc34.aarch64',
        'libgcc-11.0.1-0.3.fc34.aarch64',
        'setup-2.13.7-3.fc34.noarch',
        'basesystem-11-11.fc34.noarch',
        'glibc-2.33-5.fc34.aarch64',
        'fedora-release-34-1.noarch',
        'ncurses-base-6.2-4.20200222.fc34.noarch',
        'ncurses-libs-6.2-4.20200222.fc34.aarch64',
        'bash-5.1.0-2.fc34.aarch64',
        'filesystem-3.14-5.fc34.aarch64',
        'glibc-common-2.33-5.fc34.aarch64',
        'fedora-release-identity-basic-34-1.noarch',
        'fedora-repos-34-1.noarch'
    ])

    with open(f"{feedback_pipeline_output}/workload--bash-test--base-test--repo-test--aarch64.json") as w:
        workload = json.load(w)
        assert set(workload["data"]["pkg_env_ids"]) == expected_pkg_env_ids

def test_bash_test_repo_view(feedback_pipeline_output):
    expected_pkgs = {}

    with open(f"{feedback_pipeline_output}/view-packages--view-test.json") as w:
        expected_pkgs = {
            'fedora-gpg-keys-34-1',
            'fedora-release-34-1',
            'fedora-release-common-34-1',
            'fedora-release-identity-basic-34-1',
            'fedora-repos-34-1',
            'filesystem-3.14-5.fc34',
            'glibc-minimal-langpack-2.33-5.fc34',
            'tzdata-2021a-1.fc34',
            'basesystem-11-11.fc34',
            'bash-5.1.0-2.fc34',
            'ncurses-libs-6.2-4.20200222.fc34',
            'ncurses-base-6.2-4.20200222.fc34',
            'libgcc-11.0.1-0.3.fc34',
            'glibc-2.33-5.fc34',
            'setup-2.13.7-3.fc34',
            'glibc-common-2.33-5.fc34'
        }
        view = json.load(w)
        assert set(view['pkgs'].keys()) == expected_pkgs
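
With the module-scoped fixture in place, the suite can be exercised locally the same way CI does. A hedged example of a programmatic invocation (equivalent to running pytest on the file; the `-v` flag is only an assumption for readable output, and the run needs the packages installed by the Dockerfile, including `python3-htmlmin`):

```python
# Run the new test module programmatically; equivalent to
# "python3 -m pytest -v test_feedback_pipeline.py".
# The fixture builds real pages with --dev-buildroot, so expect it to take a while.
import sys
import pytest

sys.exit(pytest.main(["-v", "test_feedback_pipeline.py"]))
```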