Compare commits
No commits in common. "2ce86bb6841c75cefd71a8d97058b96398e1b98b" and "d3f37bed26566bdcbc189b5515369e74591856f0" have entirely different histories.
2ce86bb684...d3f37bed26
build/phase2/process_files.py (deleted, 106 lines)
@@ -1,106 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later

import logging
import multiprocessing
from pathlib import Path

from build.lib.misc import get_basepath
from build.lib.process_file import process_file

logger = logging.getLogger(__name__)


def _run_process(
    target_file: Path, processor: Path, source_file: Path, basename: Path, lang: str
):
    # if the target file does not exist, we make it
    if not target_file.exists() or any(
        # If any source file is newer than the file to be generated
        # we recreate the generated file
        # if the source file does not exist, ignore it.
        map(
            lambda file: (
                file.exists() and file.stat().st_mtime > target_file.stat().st_mtime
            ),
            [
                (
                    source_file
                    if source_file.exists()
                    else basename.with_suffix(".en.xhtml")
                ),
                processor,
                (
                    source_file.parent.joinpath("." + basename.name).with_suffix(
                        ".xmllist"
                    )
                ),
                Path(f"global/data/texts/.texts.{lang}.xml"),
                Path(f"global/data/topbanner/.topbanner.{lang}.xml"),
                Path("global/data/texts/texts.en.xml"),
            ],
        )
    ):
        logger.debug(f"Building {target_file}")
        result = process_file(source_file, processor)
        target_file.parent.mkdir(parents=True, exist_ok=True)
        target_file.write_text(result)


def _process_dir(languages: list[str], target: Path, dir: Path) -> None:
    for basename in set(map(lambda path: path.with_suffix(""), dir.glob("*.??.xhtml"))):
        for lang in languages:
            source_file = basename.with_suffix(f".{lang}.xhtml")
            target_file = target.joinpath(source_file).with_suffix(".html")
            processor = (
                basename.with_suffix(".xsl")
                if basename.with_suffix(".xsl").exists()
                else basename.parent.joinpath(".default.xsl")
            )
            _run_process(target_file, processor, source_file, basename, lang)


def _process_stylesheet(languages: list[str], target: Path, processor: Path) -> None:
    basename = get_basepath(processor)
    destination_base = target.joinpath(basename)
    for lang in languages:
        target_file = destination_base.with_suffix(
            f".{lang}{processor.with_suffix('').suffix}"
        )
        source_file = basename.with_suffix(f".{lang}.xhtml")
        _run_process(target_file, processor, source_file, basename, lang)


def process_files(
    languages: list[str], pool: multiprocessing.Pool, target: Path
) -> None:
    """
    Build .html, .rss and .ics files from .xhtml sources

    """
    # TODO for performance it would be better to iterate by processor xls, and parse it only once and pass the xsl object to called function.
    logger.info("Processing xhtml files")
    pool.starmap(
        _process_dir,
        map(
            lambda dir: (languages, target, dir),
            set(map(lambda path: path.parent, Path("").glob("*?.?*/**/*.*.xhtml"))),
        ),
    )
    logger.info("Processing rss files")
    pool.starmap(
        _process_stylesheet,
        map(
            lambda processor: (languages, target, processor),
            Path("").glob("*?.?*/**/*.rss.xsl"),
        ),
    )
    logger.info("Processing ics files")
    pool.starmap(
        _process_stylesheet,
        map(
            lambda processor: (languages, target, processor),
            Path("").glob("*?.?*/**/*.ics.xsl"),
        ),
    )
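The deleted module above, and both of the new modules below, gate every build on the same freshness test: a target is regenerated when it is missing, or when any dependency that exists is newer than it; dependencies that do not exist are ignored. A minimal standalone sketch of that check, for orientation only; the helper name needs_rebuild is hypothetical and not part of this change:

from pathlib import Path


def needs_rebuild(target: Path, dependencies: list[Path]) -> bool:
    # Hypothetical helper mirroring the inline any(map(...)) test above:
    # rebuild when the target is missing, or when any dependency that exists
    # has a newer modification time; missing dependencies are skipped.
    if not target.exists():
        return True
    target_mtime = target.stat().st_mtime
    return any(
        dep.exists() and dep.stat().st_mtime > target_mtime for dep in dependencies
    )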
build/phase2/process_rss_ics_files.py (new file, 68 lines)
@@ -0,0 +1,68 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later

import logging
import multiprocessing
from pathlib import Path

from build.lib.misc import get_basepath
from build.lib.process_file import process_file

logger = logging.getLogger(__name__)


def _process_stylesheet(languages: list[str], target: Path, source_xsl: Path) -> None:
    base_file = get_basepath(source_xsl)
    destination_base = target.joinpath(base_file)
    for lang in languages:
        target_file = destination_base.with_suffix(
            f".{lang}{source_xsl.with_suffix('').suffix}"
        )
        source_xhtml = base_file.with_suffix(f".{lang}.xhtml")
        if not target_file.exists() or any(
            # If any source file is newer than the file to be generated
            map(
                lambda file: (
                    file.exists() and file.stat().st_mtime > target_file.stat().st_mtime
                ),
                [
                    (
                        source_xhtml
                        if source_xhtml.exists()
                        else base_file.with_suffix(".en.xhtml")
                    ),
                    source_xsl,
                    Path(f"global/data/texts/.texts.{lang}.xml"),
                    Path("global/data/texts/texts.en.xml"),
                ],
            )
        ):
            logger.debug(f"Building {target_file}")
            result = process_file(source_xhtml, source_xsl)
            target_file.parent.mkdir(parents=True, exist_ok=True)
            target_file.write_text(result)


def process_rss_ics_files(
    languages: list[str], pool: multiprocessing.Pool, target: Path
) -> None:
    """
    Build .rss files from .xhtml sources
    """
    logger.info("Processing rss files")
    pool.starmap(
        _process_stylesheet,
        map(
            lambda source_xsl: (languages, target, source_xsl),
            Path("").glob("*?.?*/**/*.rss.xsl"),
        ),
    )
    logger.info("Processing ics files")
    pool.starmap(
        _process_stylesheet,
        map(
            lambda source_xsl: (languages, target, source_xsl),
            Path("").glob("*?.?*/**/*.ics.xsl"),
        ),
    )
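In _process_stylesheet the output name is derived by stripping the trailing .xsl and inserting the language code before the remaining suffix, so a stylesheet like news.rss.xsl produces news.<lang>.rss next to a news.<lang>.xhtml source. A small worked example of that pathlib arithmetic; the paths are made up, the target directory prefix is omitted, and stripping the two suffixes inline stands in for get_basepath:

from pathlib import Path

source_xsl = Path("news/news.rss.xsl")  # illustrative stylesheet path
base_file = source_xsl.with_suffix("").with_suffix("")  # news/news
lang = "de"

# source_xsl.with_suffix('').suffix is ".rss": the suffix left over once ".xsl" is removed
target_file = base_file.with_suffix(f".{lang}{source_xsl.with_suffix('').suffix}")
source_xhtml = base_file.with_suffix(f".{lang}.xhtml")

print(target_file)  # news/news.de.rss
print(source_xhtml)  # news/news.de.xhtml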
build/phase2/process_xhtml_files.py (new file, 73 lines)
@@ -0,0 +1,73 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later

import logging
import multiprocessing
from pathlib import Path

from build.lib.process_file import process_file

logger = logging.getLogger(__name__)


def _process_dir(languages: list[str], target: Path, dir: Path) -> None:
    for basename in set(map(lambda path: path.with_suffix(""), dir.glob("*.??.xhtml"))):
        for lang in languages:
            source_file = basename.with_suffix(f".{lang}.xhtml")
            target_file = target.joinpath(source_file).with_suffix(".html")
            processor = (
                basename.with_suffix(".xsl")
                if basename.with_suffix(".xsl").exists()
                else basename.parent.joinpath(".default.xsl")
            )
            if not target_file.exists() or any(
                # If any source file is newer than the file to be generated
                # If the file does not exist to
                map(
                    lambda file: (
                        file.exists()
                        and file.stat().st_mtime > target_file.stat().st_mtime
                    ),
                    [
                        (
                            source_file
                            if source_file.exists()
                            else basename.with_suffix(".en.xhtml")
                        ),
                        processor,
                        (
                            source_file.parent.joinpath(
                                "." + basename.name
                            ).with_suffix(".xmllist")
                        ),
                        Path(f"global/data/texts/.texts.{lang}.xml"),
                        Path(f"global/data/topbanner/.topbanner.{lang}.xml"),
                        Path("global/data/texts/texts.en.xml"),
                    ],
                )
            ):
                logger.debug(f"Building {target_file}")
                result = process_file(source_file, processor)
                target_file.parent.mkdir(parents=True, exist_ok=True)
                target_file.write_text(result)


def process_xhtml_files(
    languages: list[str], pool: multiprocessing.Pool, target: Path
) -> None:
    """
    Build .html files from .xhtml sources
    """
    # TODO
    # It should be possible to upgrade this and process_rss_ics files such that only one functions is needed
    # Also for performance it would be better to iterate by processor xls, and parse it only once and pass the xsl object to called function.
    logger.info("Processing xhtml files")

    pool.starmap(
        _process_dir,
        map(
            lambda dir: (languages, target, dir),
            set(map(lambda path: path.parent, Path("").glob("*?.?*/**/*.*.xhtml"))),
        ),
    )
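Note that _process_dir strips only the final suffix, so the basenames it iterates over still carry a language code (about.en.xhtml becomes about.en); the later with_suffix calls rely on pathlib replacing that last suffix, which is what makes the same basename usable for every language. A short illustration with made-up file names:

from pathlib import Path

# Made-up glob result and the basename derived from it.
found = Path("about/about.en.xhtml")
basename = found.with_suffix("")  # about/about.en

# with_suffix() swaps the trailing language code for whatever is requested:
print(basename.with_suffix(".de.xhtml"))  # about/about.de.xhtml
print(basename.with_suffix(".xsl"))  # about/about.xsl
print(basename.parent.joinpath(".default.xsl"))  # about/.default.xsl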
@@ -12,7 +12,8 @@ from pathlib import Path
 from .copy_files import copy_files
 from .create_index_symlinks import create_index_symlinks
 from .create_language_symlinks import create_language_symlinks
-from .process_files import process_files
+from .process_rss_ics_files import process_rss_ics_files
+from .process_xhtml_files import process_xhtml_files

 logger = logging.getLogger(__name__)

@@ -22,7 +23,8 @@ def phase2_run(languages: list[str], pool: multiprocessing.Pool, target: Path):
     Run all the necessary sub functions for phase2.
     """
     logger.info("Starting Phase 2 - Generating output")
-    process_files(languages, pool, target)
+    process_xhtml_files(languages, pool, target)
     create_index_symlinks(pool, target)
     create_language_symlinks(pool, target)
+    process_rss_ics_files(languages, pool, target)
     copy_files(pool, target)
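Read together with the import hunk above, the part of phase2_run covered by this hunk ends up as follows after the change (reconstructed from the diff context; lines outside the hunk are not shown):

def phase2_run(languages: list[str], pool: multiprocessing.Pool, target: Path):
    """
    Run all the necessary sub functions for phase2.
    """
    logger.info("Starting Phase 2 - Generating output")
    process_xhtml_files(languages, pool, target)
    create_index_symlinks(pool, target)
    create_language_symlinks(pool, target)
    process_rss_ics_files(languages, pool, target)
    copy_files(pool, target)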