feat: add source flag to use a different folder as the source for building sites (#5334)

I do not expect the flag to be used much, but it enforces better code design and prevents implicitly relying on the cwd.

Just a general code-style improvement, really.
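
For illustration, a minimal sketch of how the new defaults resolve, assuming a hypothetical checkout path (the real values come from argparse, as shown in the diff below):

    from pathlib import Path

    source = Path("/srv/fsfe-website")  # hypothetical --source value
    # --sites default: every site directory found in the source tree, e.g. fsfe.org
    sites = [p for p in source.glob("?*.??*") if p.is_dir()]
    # --target default: output/final inside the source tree
    target = str(source.joinpath("output/final"))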

Co-authored-by: Darragh Elliott <me@delliott.net>
Co-authored-by: tobiasd <tobiasd@fsfe.org>
Reviewed-on: #5334
Co-authored-by: delliott <delliott@fsfe.org>
Co-committed-by: delliott <delliott@fsfe.org>
commit cba4a25036 (parent e5882e406d)
committed by tobiasd, 2025-10-13 12:56:30 +00:00
20 changed files with 153 additions and 89 deletions

View File

@@ -55,6 +55,12 @@ def parse_arguments() -> argparse.Namespace:
type=int,
default=multiprocessing.cpu_count(),
)
+parser.add_argument(
+"--source",
+help="Source directory, that contains the sites and global data",
+default=Path(),
+type=Path,
+)
parser.add_argument(
"--serve",
help="Serve the webpages after rebuild",
@@ -63,8 +69,8 @@ def parse_arguments() -> argparse.Namespace:
parser.add_argument(
"--sites",
help="What site directories to build",
-default=[path for path in Path().glob("?*.??*") if path.is_dir()],
-type=lambda sites: [Path(site) for site in sites.split(",")],
+default=None,
+type=str,
)
parser.add_argument(
"--stage",
@@ -80,9 +86,17 @@ def parse_arguments() -> argparse.Namespace:
name@host:path?port.
"""),
type=str,
default="./output/final",
default=None,
)
-return parser.parse_args()
+args = parser.parse_args()
+args.sites = (
+[path for path in args.source.glob("?*.??*") if path.is_dir()]
+if args.sites is None
+else [args.source.joinpath(site) for site in args.sites.split(",")]
+)
+if args.target is None:
+args.target = str(args.source.joinpath("output/final"))
+return args
def main() -> None:
@@ -102,7 +116,7 @@ def main() -> None:
# TODO Should also be triggered whenever any build python file is changed
if args.full:
-full()
+full(args.source)
# -----------------------------------------------------------------------------
# Create XML symlinks
# -----------------------------------------------------------------------------
@@ -115,11 +129,12 @@ def main() -> None:
# otherwise. This symlinks make sure that phase 2 can easily use the right file
# for each language
global_symlinks(
+args.source,
(
args.languages
if args.languages
else sorted(
-(path.name for path in Path().glob("global/languages/??")),
+(path.name for path in args.source.glob("global/languages/??")),
)
),
pool,
@@ -128,7 +143,9 @@ def main() -> None:
stage_required = any(
[args.stage, "@" in args.target, ":" in args.target, "," in args.target],
)
working_target = Path("./output/stage" if stage_required else args.target)
working_target = Path(
f"{args.source}/output/stage" if stage_required else args.target
)
# the two middle phases are unconditional, and run on a per site basis
for site in args.sites:
logger.info("Processing %s", site)
@@ -141,6 +158,7 @@ def main() -> None:
# Do not get access to languages to be built in,
# and other benefits of being ran later.
prepare_early_subdirectories(
+args.source,
site,
args.processes,
)
@@ -152,8 +170,14 @@ def main() -> None:
)
)
# Processes needed only for subdir stuff
-phase1_run(site, languages, args.processes, pool)
-phase2_run(site, languages, pool, working_target.joinpath(site))
+phase1_run(args.source, site, languages, args.processes, pool)
+phase2_run(
+args.source,
+site,
+languages,
+pool,
+working_target.joinpath(site.name),
+)
logger.info("Starting Phase 3 - Global Conditional Finishing")
if stage_required:
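
To illustrate the staging decision above, a sketch with a hypothetical remote target: any "@", ":" or "," in the target (the name@host:path?port form) forces a local staging directory under the source tree.

    from pathlib import Path

    source = Path("/srv/fsfe-website")  # hypothetical --source
    target = "deploy@host:/var/www?22"  # hypothetical remote --target
    stage = False  # as if --stage were not passed
    stage_required = any([stage, "@" in target, ":" in target, "," in target])
    working_target = Path(f"{source}/output/stage" if stage_required else target)
    assert working_target == source / "output" / "stage"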

View File

@@ -47,7 +47,7 @@ def _get_attributes(file: Path) -> dict:
return dict(attributes)
-def _get_trlist(file: Path) -> etree.Element:
+def _get_trlist(source: Path, file: Path) -> etree.Element:
"""
list all languages a file exists in by globbing up
the shortname (i.e. file path with file ending omitted)
@@ -58,12 +58,16 @@ def _get_trlist(file: Path) -> etree.Element:
for path in file.parent.glob(f"{get_basename(file)}.??{file.suffix}"):
tr = etree.SubElement(trlist, "tr", id=lang_from_filename(path))
tr.text = (
Path(f"global/languages/{lang_from_filename(path)}").read_text().strip()
source.joinpath(f"global/languages/{lang_from_filename(path)}")
.read_text()
.strip()
)
return trlist
-def _get_set(action_file: Path, lang: str, parser: etree.XMLParser) -> etree.Element:
+def _get_set(
+source: Path, action_file: Path, lang: str, parser: etree.XMLParser
+) -> etree.Element:
"""
import elements from source files, add file name
attribute to first element included from each file
@@ -75,7 +79,7 @@ def _get_set(action_file: Path, lang: str, parser: etree.XMLParser) -> etree.Ele
if list_file.exists():
with list_file.open("r") as file:
-for path in (Path(line.strip()) for line in file):
+for path in (source.joinpath(line.strip()) for line in file):
path_xml = (
path.with_suffix(f".{lang}.xml")
if path.with_suffix(f".{lang}.xml").exists()
@@ -87,6 +91,7 @@ def _get_set(action_file: Path, lang: str, parser: etree.XMLParser) -> etree.Ele
def _get_document(
+source: Path,
action_lang: str,
action_file: Path,
lang: str,
@@ -97,12 +102,14 @@ def _get_document(
language=action_lang,
**_get_attributes(action_file),
)
-document.append(_get_set(action_file, lang, parser))
+document.append(_get_set(source, action_file, lang, parser))
document.extend(_get_xmls(action_file, parser))
return document
-def _build_xmlstream(infile: Path, parser: etree.XMLParser) -> etree.Element:
+def _build_xmlstream(
+source: Path, infile: Path, parser: etree.XMLParser
+) -> etree.Element:
"""
assemble the xml stream for feeding into xsltproc
the expected shortname and language flag indicate
@@ -128,8 +135,8 @@ def _build_xmlstream(infile: Path, parser: etree.XMLParser) -> etree.Element:
.with_suffix("")
.suffix.removeprefix(".")
)
-topbanner_xml = Path(f"global/data/topbanner/.topbanner.{lang}.xml")
-texts_xml = Path(f"global/data/texts/.texts.{lang}.xml")
+topbanner_xml = source.joinpath(f"global/data/topbanner/.topbanner.{lang}.xml")
+texts_xml = source.joinpath(f"global/data/texts/.texts.{lang}.xml")
date = str(datetime.now().date())
action_lang = ""
translation_state = ""
@@ -168,7 +175,7 @@ def _build_xmlstream(infile: Path, parser: etree.XMLParser) -> etree.Element:
)
# Add the subelements
-page.append(_get_trlist(infile))
+page.append(_get_trlist(source, infile))
# Make the relevant subelmenets
# and then add the relevant xmls to it
@@ -176,23 +183,27 @@ def _build_xmlstream(infile: Path, parser: etree.XMLParser) -> etree.Element:
topbanner.extend(_get_xmls(topbanner_xml, parser))
textsetbackup = etree.SubElement(page, "textsetbackup")
-textsetbackup.extend(_get_xmls(Path("global/data/texts/texts.en.xml"), parser))
+textsetbackup.extend(
+_get_xmls(source.joinpath("global/data/texts/texts.en.xml"), parser)
+)
textset = etree.SubElement(page, "textset")
textset.extend(_get_xmls(texts_xml, parser))
-page.append(_get_document(action_lang, action_file, lang, parser))
+page.append(_get_document(source, action_lang, action_file, lang, parser))
return page
-def process_file(infile: Path, transform: etree.XSLT) -> etree._XSLTResultTree:
+def process_file(
+source: Path, infile: Path, transform: etree.XSLT
+) -> etree._XSLTResultTree:
"""
Process a given file using the correct xsl sheet
"""
logger.debug("Processing %s", infile)
lang = lang_from_filename(infile)
parser = etree.XMLParser(remove_blank_text=True, remove_comments=True)
-xmlstream = _build_xmlstream(infile, parser)
+xmlstream = _build_xmlstream(source, infile, parser)
result = transform(xmlstream)
# And now a bunch of regexes to fix some links.
# xx is the language code in all comments
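
A rough sketch of the translation discovery that _get_trlist now performs against the source tree; the page path is hypothetical and the language extraction is a stand-in for the project's lang_from_filename helper:

    from pathlib import Path

    source = Path("/srv/fsfe-website")  # hypothetical
    infile = source / "about" / "index.en.xhtml"  # hypothetical page
    # sibling translations share the shortname: index.de.xhtml, index.fr.xhtml, ...
    for path in infile.parent.glob(f"index.??{infile.suffix}"):
        lang = path.suffixes[-2].lstrip(".")  # stand-in for lang_from_filename
        # the display name is read from <source>/global/languages/<lang>
        name = source.joinpath(f"global/languages/{lang}").read_text().strip()
        print(lang, name)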

View File

@@ -3,18 +3,29 @@
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
from pathlib import Path
from fsfe_website_build.lib.misc import run_command
logger = logging.getLogger(__name__)
-def full() -> None:
+def full(source: Path) -> None:
"""
Git clean the repo to remove all cached artifacts
Excluded the root .venv repo, as removing it mid build breaks the build, obviously
"""
logger.info("Performing a full rebuild, git cleaning")
run_command(
["git", "clean", "-fdx", "--exclude", "/.venv", "--exclude", "/.nltk_data"],
[
"git",
"--git-dir",
str(source) + "/.git",
"clean",
"-fdx",
"--exclude",
"/.venv",
"--exclude",
"/.nltk_data",
],
)

View File

@@ -10,21 +10,23 @@ from pathlib import Path
logger = logging.getLogger(__name__)
-def _do_symlinking(link_type: str, lang: str) -> None:
+def _do_symlinking(source: Path, link_type: str, lang: str) -> None:
"""
Helper function for global symlinking that is suitable for multithreading
"""
target = (
Path(f"global/data/{link_type}/{link_type}.{lang}.xml")
if Path(f"global/data/{link_type}/{link_type}.{lang}.xml").exists()
else Path(f"global/data/{link_type}/{link_type}.en.xml")
source.joinpath(f"global/data/{link_type}/{link_type}.{lang}.xml")
if source.joinpath(f"global/data/{link_type}/{link_type}.{lang}.xml").exists()
else source.joinpath(f"global/data/{link_type}/{link_type}.en.xml")
)
-source = Path(f"global/data/{link_type}/.{link_type}.{lang}.xml")
-if not source.exists():
-source.symlink_to(target.relative_to(source.parent))
+source_xml = source.joinpath(f"global/data/{link_type}/.{link_type}.{lang}.xml")
+if not source_xml.exists():
+source_xml.symlink_to(target.relative_to(source_xml.parent))
-def global_symlinks(languages: list[str], pool: multiprocessing.pool.Pool) -> None:
+def global_symlinks(
+source: Path, languages: list[str], pool: multiprocessing.pool.Pool
+) -> None:
"""
After this step, the following symlinks will exist:
* global/data/texts/.texts.<lang>.xml for each language
@@ -36,4 +38,4 @@ def global_symlinks(languages: list[str], pool: multiprocessing.pool.Pool) -> No
"""
logger.info("Creating global symlinks")
link_types = ["texts", "topbanner"]
-pool.starmap(_do_symlinking, product(link_types, languages))
+pool.starmap(_do_symlinking, product([source], link_types, languages))
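
Sketched effect of _do_symlinking for one language (paths hypothetical): the hidden per-language file links to the translated XML when it exists, falling back to the English one otherwise.

    from pathlib import Path

    source = Path("/srv/fsfe-website")  # hypothetical
    link_type, lang = "texts", "de"
    translated = source / f"global/data/{link_type}/{link_type}.{lang}.xml"
    fallback = source / f"global/data/{link_type}/{link_type}.en.xml"
    target = translated if translated.exists() else fallback
    link = source / f"global/data/{link_type}/.{link_type}.{lang}.xml"
    if not link.exists():
        link.symlink_to(target.relative_to(link.parent))  # relative symlink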

View File

@@ -9,7 +9,9 @@ from pathlib import Path
logger = logging.getLogger(__name__)
-def prepare_early_subdirectories(source_dir: Path, processes: int) -> None:
+def prepare_early_subdirectories(
+source: Path, source_dir: Path, processes: int
+) -> None:
"""
Find any early subdir scripts in subdirectories and run them
"""
@@ -21,7 +23,7 @@ def prepare_early_subdirectories(source_dir: Path, processes: int) -> None:
# here for out subdir scripts
import early_subdir # noqa: PLC0415 # pyright: ignore [reportMissingImports]
-early_subdir.run(processes, subdir_path)
+early_subdir.run(source, processes, subdir_path)
# Remove its path from where things can be imported
sys.path.remove(str(subdir_path.resolve()))
# Remove it from loaded modules

View File

@@ -10,6 +10,7 @@ logger = logging.getLogger(__name__)
def prepare_subdirectories(
+source: Path,
source_dir: Path,
languages: list[str],
processes: int,
@@ -25,7 +26,7 @@ def prepare_subdirectories(
# here for out subdir scripts
import subdir # noqa: PLC0415 # pyright: ignore [reportMissingImports]
-subdir.run(languages, processes, subdir_path)
+subdir.run(source, languages, processes, subdir_path)
# Remove its path from where things can be imported
sys.path.remove(str(subdir_path.resolve()))
# Remove it from loaded modules

View File

@@ -25,7 +25,8 @@ logger = logging.getLogger(__name__)
def phase1_run(
-source_dir: Path,
+source: Path,
+source_site: Path,
languages: list[str],
processes: int,
pool: multiprocessing.pool.Pool,
@@ -41,9 +42,7 @@ def phase1_run(
# This step recompiles the less files into the final CSS files to be
# distributed to the web server.
-update_css(
-source_dir,
-)
+update_css(source_site)
# -----------------------------------------------------------------------------
# Update XSL stylesheets
# -----------------------------------------------------------------------------
@@ -57,12 +56,12 @@ def phase1_run(
# and events directories, the XSL files, if updated, will be copied for the
# per-year archives.
-update_stylesheets(source_dir, pool)
+update_stylesheets(source_site, pool)
# -----------------------------------------------------------------------------
# Dive into subdirectories
# -----------------------------------------------------------------------------
# Find any makefiles in subdirectories and run them
-prepare_subdirectories(source_dir, languages, processes)
+prepare_subdirectories(source, source_site, languages, processes)
# -----------------------------------------------------------------------------
# Create XSL symlinks
@@ -74,14 +73,14 @@ def phase1_run(
# determine which XSL script should be used to build a HTML page from a source
# file.
-update_defaultxsls(source_dir, pool)
+update_defaultxsls(source_site, pool)
# -----------------------------------------------------------------------------
# Update local menus
# -----------------------------------------------------------------------------
# After this step, all .localmenu.??.xml files will be up to date.
-update_localmenus(source_dir, languages, pool)
+update_localmenus(source, source_site, languages, pool)
# -----------------------------------------------------------------------------
# Update XML filelists
# -----------------------------------------------------------------------------
@@ -93,4 +92,4 @@ def phase1_run(
# correct XML files when generating the HTML pages. It is taken care that
# these files are only updated whenever their content actually changes, so
# they can serve as a prerequisite in the phase 2 Makefile.
-update_xmllists(source_dir, languages, pool)
+update_xmllists(source, source_site, languages, pool)

View File

@@ -76,6 +76,7 @@ def _write_localmenus(
def update_localmenus(
+source: Path,
source_dir: Path,
languages: list[str],
pool: multiprocessing.pool.Pool,
@@ -94,7 +95,9 @@ def update_localmenus(
if xslt_root.xpath("//localmenu"):
directory = xslt_root.xpath("//localmenu/@dir")
directory = (
-directory[0] if directory else str(file.parent.relative_to(Path()))
+str(source.joinpath(directory[0]))
+if directory
+else str(file.parent.resolve())
)
if directory not in files_by_dir:
files_by_dir[directory] = set()

View File

@@ -20,7 +20,7 @@ def _update_sheet(file: Path) -> None:
"""
xslt_root = etree.parse(file)
imports = [
-file.parent.joinpath(imp.get("href")).resolve().relative_to(Path.cwd())
+file.parent.joinpath(imp.get("href")).resolve()
for imp in xslt_root.xpath(
"//xsl:import", namespaces={"xsl": "http://www.w3.org/1999/XSL/Transform"}
)

View File

@@ -21,7 +21,8 @@ from fsfe_website_build.lib.misc import (
logger = logging.getLogger(__name__)
-def _update_for_base(
+def _update_for_base( # noqa: PLR0913
+source: Path,
base: Path,
all_xml: set[Path],
nextyear: str,
@@ -56,14 +57,14 @@ def _update_for_base(
for xml_file in filter(
lambda xml_file:
# Matches glob pattern
-fnmatch.fnmatchcase(str(xml_file), pattern)
+fnmatch.fnmatchcase(str(xml_file.relative_to(source)), pattern)
# contains tag if tag in pattern
and (
any(
(
-etree.parse(
-xml_file_with_ending,
-).find(f".//tag[@key='{tag}']")
+etree.parse(xml_file_with_ending).find(
+f".//tag[@key='{tag}']"
+)
is not None
for xml_file_with_ending in xml_file.parent.glob(
f"{xml_file.name}.*.xml"
@@ -77,12 +78,14 @@ def _update_for_base(
and len(str(xml_file)) > 0,
all_xml,
):
-matching_files.add(str(xml_file))
+matching_files.add(str(xml_file.relative_to(source)))
-for file in Path().glob(f"{base}.??.xhtml"):
+for file in base.parent.glob(f"{base.name}.??.xhtml"):
xslt_root = etree.parse(file)
for module in xslt_root.xpath("//module"):
matching_files.add(f"global/data/modules/{module.get('id')}".strip())
matching_files.add(
f"{source}/global/data/modules/{module.get('id').strip()}"
)
matching_files = sorted(matching_files)
update_if_changed(
Path(f"{base.parent}/.{base.name}.xmllist"),
@@ -91,6 +94,7 @@ def _update_for_base(
def _update_module_xmllists(
+source: Path,
source_dir: Path,
languages: list[str],
pool: multiprocessing.pool.Pool,
@@ -105,7 +109,7 @@ def _update_module_xmllists(
for path in filter(
lambda path: lang_from_filename(path) in languages,
list(source_dir.glob("**/*.*.xml"))
+ list(Path("global/").glob("**/*.*.xml")),
+ list(source.joinpath("global/").glob("**/*.*.xml")),
)
}
source_bases = {path.with_suffix("") for path in source_dir.glob("**/*.sources")}
@@ -123,23 +127,25 @@ def _update_module_xmllists(
lastyear = str(datetime.datetime.today().year - 1)
pool.starmap(
_update_for_base,
-((base, all_xml, nextyear, thisyear, lastyear) for base in all_bases),
+((source, base, all_xml, nextyear, thisyear, lastyear) for base in all_bases),
)
-def _check_xmllist_deps(file: Path) -> None:
+def _check_xmllist_deps(source: Path, file: Path) -> None:
"""
If any of the sources in an xmllist are newer than it, touch the xmllist
"""
xmls = set()
with file.open(mode="r") as fileobj:
for line in fileobj:
-for newfile in Path().glob(line.strip() + ".??.xml"):
+path_line = source.joinpath(line.strip())
+for newfile in path_line.parent.glob(path_line.name + ".??.xml"):
xmls.add(newfile)
touch_if_newer_dep(file, list(xmls))
def _touch_xmllists_with_updated_deps(
+source: Path,
source_dir: Path,
pool: multiprocessing.pool.Pool,
) -> None:
@@ -147,10 +153,14 @@ def _touch_xmllists_with_updated_deps(
Touch all .xmllist files where one of the contained files has changed
"""
logger.info("Checking contents of XML lists")
-pool.map(_check_xmllist_deps, source_dir.glob("**/.*.xmllist"))
+pool.starmap(
+_check_xmllist_deps,
+[(source, path) for path in source_dir.glob("**/.*.xmllist")],
+)
def update_xmllists(
+source: Path,
source_dir: Path,
languages: list[str],
pool: multiprocessing.pool.Pool,
@@ -172,5 +182,5 @@ def update_xmllists(
When a tag has been removed from the last XML file where it has been used,
the tagged-* are correctly deleted.
"""
-_update_module_xmllists(source_dir, languages, pool)
-_touch_xmllists_with_updated_deps(source_dir, pool)
+_update_module_xmllists(source, source_dir, languages, pool)
+_touch_xmllists_with_updated_deps(source, source_dir, pool)
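
Roughly what _check_xmllist_deps now does, with list entries resolved against the source tree (a sketch; touch_if_newer_dep is the project helper that compares mtimes, not reproduced here):

    from pathlib import Path

    def check_deps(source: Path, xmllist: Path) -> set[Path]:
        xmls = set()
        with xmllist.open("r") as fileobj:
            for line in fileobj:
                entry = source.joinpath(line.strip())
                # collect every language variant of each listed file
                xmls.update(entry.parent.glob(f"{entry.name}.??.xml"))
        # touch_if_newer_dep(xmllist, list(xmls)) would then touch the list
        return xmls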

View File

@@ -15,7 +15,8 @@ from fsfe_website_build.lib.process_file import process_file
logger = logging.getLogger(__name__)
-def _process_set(
+def _process_set( # noqa: PLR0913
+source: Path,
source_dir: Path,
languages: list[str],
target: Path,
@@ -63,12 +64,13 @@ def _process_set(
),
):
logger.debug("Building %s", target_file)
-result = process_file(source_file, transform)
+result = process_file(source, source_file, transform)
target_file.parent.mkdir(parents=True, exist_ok=True)
result.write_output(target_file)
def process_files(
+source: Path,
source_dir: Path,
languages: list[str],
pool: multiprocessing.pool.Pool,
@@ -103,13 +105,11 @@ def process_files(
file.with_suffix("")
.with_suffix(".xsl")
.resolve()
-.relative_to(source_dir.parent.resolve())
+.relative_to(source.resolve())
)
# if that does not exist, default to
xhtml_default_processor = (
file.parent.joinpath(".default.xsl")
.resolve()
.relative_to(source_dir.parent.resolve())
file.parent.joinpath(".default.xsl").resolve().relative_to(source.resolve())
)
xhtml_processor = (
xhtml_named_processor
@@ -125,7 +125,7 @@ def process_files(
pool.starmap(
_process_set,
(
-(source_dir, languages, target, processor, files)
+(source, source_dir, languages, target, processor, files)
for processor, files in process_files_dict.items()
),
)
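
The stylesheet selection that these relative_to changes affect, sketched with hypothetical names: a page uses the XSL file sharing its shortname if one exists, otherwise the directory's .default.xsl, and either path is expressed relative to the source tree.

    from pathlib import Path

    source = Path("/srv/fsfe-website")  # hypothetical
    file = source / "news" / "index.en.xhtml"  # hypothetical page
    named = file.with_suffix("").with_suffix(".xsl")  # news/index.xsl
    default = file.parent / ".default.xsl"  # news/.default.xsl
    chosen = named if named.exists() else default
    processor = chosen.resolve().relative_to(source.resolve())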

View File

@@ -18,6 +18,7 @@ logger = logging.getLogger(__name__)
def phase2_run(
+source: Path,
source_dir: Path,
languages: list[str],
pool: multiprocessing.pool.Pool,
@@ -27,7 +28,7 @@ def phase2_run(
Run all the necessary sub functions for phase2.
"""
logger.info("Starting Phase 2 - Generating output")
-process_files(source_dir, languages, pool, target)
+process_files(source, source_dir, languages, pool, target)
create_index_symlinks(pool, target)
create_language_symlinks(pool, target)
copy_files(source_dir, pool, target)

View File

@@ -65,7 +65,7 @@ def process_file_link_rewrites_test(
)
assert isinstance(sample_xsl_transformer, etree.XSLT)
-result_doc = process_file(xml_path, sample_xsl_transformer)
+result_doc = process_file(Path(), xml_path, sample_xsl_transformer)
assert isinstance(result_doc, etree._XSLTResultTree)
# We get a list, but as we have only one link in the above sample
# we only need to care about the first one

View File

@@ -28,20 +28,20 @@ def _gen_archive_index(
update_if_changed(directory.joinpath(f"index.{lang}.xhtml"), content)
-def _gen_index_sources(directory: Path) -> None:
+def _gen_index_sources(source: Path, directory: Path) -> None:
update_if_changed(
directory.joinpath("index.sources"),
dedent(
f"""\
-{directory}/event-*:[]
-{directory}/.event-*:[]
-{directory.parent}/.localmenu:[]
+{directory.relative_to(source)}/event-*:[]
+{directory.relative_to(source)}/.event-*:[]
+{directory.parent.relative_to(source)}/.localmenu:[]
""",
),
)
-def run(languages: list[str], processes: int, working_dir: Path) -> None:
+def run(source: Path, languages: list[str], processes: int, working_dir: Path) -> None:
"""
preparation for news subdirectory
"""
@@ -54,5 +54,5 @@ def run(languages: list[str], processes: int, working_dir: Path) -> None:
)
logger.debug("Finished Archiving")
# Generate index.sources for every year
-pool.map(_gen_index_sources, years)
+pool.starmap(_gen_index_sources, [(source, year) for year in years])
logger.debug("Finished generating sources")

View File

@@ -15,7 +15,7 @@ from lxml import etree
logger = logging.getLogger(__name__)
-def run(languages: list[str], processes: int, working_dir: Path) -> None: # noqa: ARG001 # We allow unused args for subdirs
+def run(source: Path, languages: list[str], processes: int, working_dir: Path) -> None: # noqa: ARG001 # We allow unused args for subdirs
"""
Internal subdir preparation
"""

View File

@@ -29,14 +29,14 @@ def _gen_archive_index(
update_if_changed(directory.joinpath(f"index.{lang}.xhtml"), content)
-def _gen_index_sources(directory: Path) -> None:
+def _gen_index_sources(source: Path, directory: Path) -> None:
update_if_changed(
directory.joinpath("index.sources"),
dedent(
f"""\
-{directory}/news-*:[]
-{directory}/.news-*:[]
-{directory.parent}/.localmenu:[]
+{directory.relative_to(source)}/news-*:[]
+{directory.relative_to(source)}/.news-*:[]
+{directory.parent.relative_to(source)}/.localmenu:[]
""",
),
)
@@ -64,7 +64,7 @@ def _gen_xml_files(working_dir: Path, file: Path) -> None:
)
-def run(languages: list[str], processes: int, working_dir: Path) -> None:
+def run(source: Path, languages: list[str], processes: int, working_dir: Path) -> None:
"""
preparation for news subdirectory
"""
@@ -77,7 +77,7 @@ def run(languages: list[str], processes: int, working_dir: Path) -> None:
)
logger.debug("Finished Archiving")
# Generate index.sources for every year
-pool.map(_gen_index_sources, years)
+pool.starmap(_gen_index_sources, [(source, year) for year in years])
logger.debug("Finished generating sources")
pool.starmap(
_gen_xml_files,

View File

@@ -67,7 +67,7 @@ def _process_file(file: Path, stopwords: set[str]) -> dict:
}
-def run(languages: list[str], processes: int, working_dir: Path) -> None:
+def run(source: Path, languages: list[str], processes: int, working_dir: Path) -> None: # noqa: ARG001
"""
This step runs a Python tool that creates an index of all news and
articles. It extracts titles, teaser, tags, dates and potentially more.

View File

@@ -72,7 +72,7 @@ def _update_tag_sets(
)
-def run(languages: list[str], processes: int, working_dir: Path) -> None:
+def run(source: Path, languages: list[str], processes: int, working_dir: Path) -> None: # noqa: ARG001
"""
Update Tag pages, xmllists and xmls

View File

@@ -14,7 +14,7 @@ from lxml import etree
logger = logging.getLogger(__name__)
-def run(processes: int, working_dir: Path) -> None:
+def run(source: Path, processes: int, working_dir: Path) -> None:
"""
Place filler indices to encourage the site to
ensure that status pages for all langs are build.
@@ -44,6 +44,6 @@ def run(processes: int, working_dir: Path) -> None:
),
index_content,
)
for path in Path().glob("global/languages/*")
for path in source.glob("global/languages/*")
),
)

View File

@@ -154,7 +154,7 @@ def _create_translation_file(
update_if_changed(work_file, result_str)
-def run(languages: list[str], processes: int, working_dir: Path) -> None:
+def run(source: Path, languages: list[str], processes: int, working_dir: Path) -> None: # noqa: ARG001
"""
Build translation-status xmls for languages where the status has changed.
Xmls are placed in target_dir, and only languages are processed.