* Use alternate self-sufficient shellcheck precommit This pre-commit does not require prerequisites on the host, making it easier to run in CI workflows. * Switch to upstream ansible-lint pre-commit hook This way, the hook is self contained and does not depend on a previous virtualenv installation. * pre-commit: fix hooks dependencies - ansible-syntax-check - tox-inventory-builder - jinja-syntax-check * Fix ci-matrix pre-commit hook - Remove dependency of pydblite which fails to setup on recent pythons - Discard shell script and put everything into pre-commit * pre-commit: apply autofixes hooks and fix the rest manually - markdownlint (manual fix) - end-of-file-fixer - requirements-txt-fixer - trailing-whitespace * Convert check_typo to pre-commit + use maintained version client9/misspell is unmaintained, and has been forked by the golangci team, see https://github.com/client9/misspell/issues/197#issuecomment-1596318684. They haven't yet added a pre-commit config, so use my fork with the pre-commit hook config until the pull request is merged. * collection-build-install convert to pre-commit * Run pre-commit hooks in dynamic pipeline Use gitlab dynamic child pipelines feature to have one source of truth for the pre-commit jobs, the pre-commit config file. Use one cache per pre-commit. This should reduce the "fetching cache" time steps in gitlab-ci, since each job will have a separate cache with only its hook installed. * Remove gitlab-ci job done in pre-commit * pre-commit: adjust markdownlint default, md fixes Use a style file as recommended by upstream. This makes for only one source of truth. Conserve previous upstream default for MD007 (upstream default changed here https://github.com/markdownlint/markdownlint/pull/373) * Update pre-commit hooks --------- Co-authored-by: Max Gautier <mg@max.gautier.name>
97 lines
3.2 KiB
Python
Executable File
97 lines
3.2 KiB
Python
Executable File
#!/usr/bin/env python
|
|
import argparse
|
|
import sys
|
|
import glob
|
|
from pathlib import Path
|
|
import yaml
|
|
import re
|
|
import jinja2
|
|
import sys
|
|
|
|
from pprint import pprint
|
|
|
|
|
|
# Command-line interface: where to find the CI scenario files and where
# to write the generated Markdown coverage table.
parser = argparse.ArgumentParser(
    description='Generate a Markdown table representing the CI test coverage',
)
parser.add_argument(
    '--dir',
    default='tests/files/',
    help='folder with test yml files',
)
parser.add_argument(
    '--output',
    default='docs/developers/ci.md',
    help='output file',
)
args = parser.parse_args()

# Directory holding one YAML file per CI test scenario.
p = Path(args.dir)

# The table.md.j2 template lives next to this script (sys.path[0] is the
# script's own directory when run as an executable).
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath=sys.path[0]))
|
|
|
|
# Data represents CI coverage data matrix
|
|
class Data:
    """In-memory CI coverage matrix.

    Records every (container_manager, network_plugin, os) combination
    exercised by the CI and can render the matrix either through the
    Jinja2 template (``jinja``) or as plain Markdown (``markdown``).
    """

    def __init__(self):
        # Individual axis values seen so far.
        self.container_managers = set()
        self.network_plugins = set()
        self.os = set()
        # Tested triples, stored as tuples rather than concatenated
        # strings so e.g. ('a', 'bc', 'd') cannot collide with
        # ('ab', 'c', 'd').
        self.combination = set()

    def set(self, container_manager, network_plugin, os):
        """Record one tested combination."""
        self.container_managers.add(container_manager)
        self.network_plugins.add(network_plugin)
        self.os.add(os)
        self.combination.add((container_manager, network_plugin, os))

    def exists(self, container_manager, network_plugin, os):
        """Return True if this exact combination was recorded."""
        return (container_manager, network_plugin, os) in self.combination

    def jinja(self):
        """Render the coverage matrix using the table.md.j2 template."""
        template = env.get_template('table.md.j2')
        container_engines = sorted(self.container_managers)
        network_plugins = sorted(self.network_plugins)
        operating_systems = sorted(self.os)

        return template.render(
            container_engines=container_engines,
            network_plugins=network_plugins,
            operating_systems=operating_systems,
            exists=self.exists
        )

    def markdown(self):
        """Render the coverage matrix as plain Markdown tables.

        One table per container manager, with operating systems as rows
        and network plugins as columns.  This used to read a pydblite
        ``self.db`` attribute that is no longer set anywhere (the
        dependency was removed), which made any call fail with
        AttributeError; it now reads the in-memory sets instead.
        """
        out = ''
        for container_manager in sorted(self.container_managers):
            # Prepare the headers
            out += "# " + container_manager + "\n"
            headers = '|OS / CNI| '
            underline = '|----|'
            for network_plugin in sorted(self.network_plugins):
                headers += network_plugin + ' | '
                underline += '----|'
            out += headers + "\n" + underline + "\n"
            for operating_system in sorted(self.os):
                out += '| ' + operating_system + ' | '
                for network_plugin in sorted(self.network_plugins):
                    if self.exists(container_manager, network_plugin, operating_system):
                        emoji = ':white_check_mark:'
                    else:
                        emoji = ':x:'
                    out += emoji + ' | '
                out += "\n"
        return out
|
|
|
|
|
|
|
|
# Guard against a bad --dir before doing any work.
if not p.is_dir():
    print("Path is not a directory")
    sys.exit(2)

data = Data()
for f in p.glob('*.yml'):
    # safe_load is sufficient for these scenario files and, unlike
    # FullLoader, cannot instantiate arbitrary Python objects.  The
    # context manager also closes the handle (it used to leak).
    with f.open() as stream:
        y = yaml.safe_load(stream) or {}

    # Kubespray defaults when a scenario file does not override them.
    container_manager = y.get('container_manager', 'containerd')
    network_plugin = y.get('kube_network_plugin', 'calico')

    # File names look like '<prefix>_<os>...yml'; the first group after
    # the lowercase prefix is the operating system identifier.
    x = re.match(r"^[a-z-]+_([a-z0-9]+).*", f.name)
    if x is None:
        # Skip non-conforming file names instead of crashing with
        # AttributeError on x.group(1).
        print("Skipping %s: unexpected file name" % f.name, file=sys.stderr)
        continue
    operating_system = x.group(1)

    data.set(container_manager=container_manager,
             network_plugin=network_plugin, os=operating_system)

# Write the rendered table, closing the output file properly (it used to
# be left open until interpreter exit).
with open(args.output, 'w') as output_file:
    print(data.jinja(), file=output_file)
|