Compare commits


No commits in common. "master" and "master" have entirely different histories.

1415 changed files with 5303 additions and 30739 deletions


@@ -7,16 +7,12 @@ clone:
depth: 150
steps:
- name: check-python
image: ghcr.io/astral-sh/ruff:latest
command: [ "check", "." ]
- name: check-custom
- name: checks
image: debian:bookworm
commands:
- apt update
# Install required packages
- apt install --yes --no-install-recommends coreutils sed grep libxml2-utils git findutils perl-base file mediainfo curl
- apt install --yes coreutils sed grep libxml2-utils git findutils perl-base file mediainfo curl
# Check whether non-EN news item would appear on front-page
- bash tools/check-non-en-frontpage.sh news
# Run pre-commit checks
@@ -24,89 +20,15 @@ steps:
# Check syntax for all files as a safety net
- find . -type f \( -iname "*.xhtml" -o -iname "*.xml" -o -iname "*.xsl" \) -exec xmllint --noout {} +
- name: deploy-master
image: docker:27.4.1
environment:
# Environment variables necessary for rootless Docker
XDG_RUNTIME_DIR: "/run/user/1001"
DOCKER_HOST: "unix:///run/user/1001/docker.sock"
# Target bunsen directly, and use ipv4 proxies for noddack and gahn, as ipv6 is broken.
TARGET: "www@bunsen.fsfeurope.org:fsfe.org/global/,www@proxy.noris.fsfeurope.org:fsfe.org/global/?10322,www@proxy.plutex.fsfeurope.org:fsfe.org/global/?10322"
KEY_PRIVATE:
from_secret: KEY_PRIVATE
KEY_PASSWORD:
from_secret: KEY_PASSWORD
GIT_TOKEN:
from_secret: BUILD_TOKEN
VOLUME:
website-cached-master
volumes:
# Mounting Docker socket of rootless docker user
- name: dockersock
path: /run/user/1001/docker.sock
commands:
- docker ps && echo "tampered with"
# If we are in a cron job, then do a full rebuild
# Ideally the cron would set the flag itself, but drone does not support that.
- if [ "$DRONE_BUILD_EVENT" = "cron" ]; then EXTRA_FLAGS="--full"; fi
- docker compose -p fsfe-website run --remove-orphans --build build --target "$TARGET" $EXTRA_FLAGS
when:
branch:
- master
event:
exclude:
- pull_request
- name: deploy-test
image: docker:27.4.1
environment:
# Environment variables necessary for rootless Docker
XDG_RUNTIME_DIR: "/run/user/1001"
DOCKER_HOST: "unix:///run/user/1001/docker.sock"
# Target bunsen directly, and use ipv4 proxies for noddack and gahn, as ipv6 is broken.
TARGET: "www@bunsen.fsfeurope.org:test.fsfe.org/global/,www@proxy.noris.fsfeurope.org:test.fsfe.org/global/?10322,www@proxy.plutex.fsfeurope.org:test.fsfe.org/global/?10322"
KEY_PRIVATE:
from_secret: KEY_PRIVATE
KEY_PASSWORD:
from_secret: KEY_PASSWORD
GIT_TOKEN:
from_secret: BUILD_TOKEN
volumes:
# Mounting Docker socket of rootless docker user
- name: dockersock
path: /run/user/1001/docker.sock
commands:
- docker ps && echo "tampered with"
# If we are in a cron job, then do a full rebuild
# Ideally the cron would set the flag itself, but drone does not support that.
- if [ "$DRONE_BUILD_EVENT" = "cron" ]; then EXTRA_FLAGS="--full"; fi
- docker compose -p fsfe-website run --remove-orphans --build build --target "$TARGET" $EXTRA_FLAGS
when:
branch:
- test
event:
exclude:
- pull_request
trigger:
branch:
- master
- test
event:
- cron
- custom
- pull_request
- push
node:
cont2: noris
volumes:
# Define Docker socket of rootless docker user
- name: dockersock
host:
path: /run/user/1001/docker.sock
- pull_request
---
kind: signature
hmac: 1f8d1a3a595b66777a095a331964f64d83c9326ce7cc023d6830e15e715a50dc
hmac: 4c0dd0f272458d12234c72f66c4d420069591cac83819644df3c03a280102ded
...

.gitignore vendored

@@ -1,18 +1,22 @@
*/look/fsfe.min.css
*/look/valentine.min.css
fsfe.org/events/????/index.??.xhtml
fsfe.org/events/????/index.sources
fsfe.org/events/????/index.xsl
fsfe.org/news/????/index.??.xhtml
fsfe.org/news/????/index.sources
fsfe.org/news/????/index.xsl
fsfe.org/news/*/.*.??.xml
global/data/texts/.texts.??.xml
global/data/topbanner/.topbanner.??.xml
.default.xsl
.localmenu.*.xml
.*.xmllist
fsfe.org/search/index.js
fsfe.org/tags/tagged-*.en.xhtml
fsfe.org/tags/.tags.??.xml
# Local build stuff
output
# Python venv
.venv
__pycache__
#Nltk
.nltk_data
# Secrets
# docker compose
.env
# drone
secrets.txt
## Status dir stuff
status.fsfe.org/*/data*/*


@@ -1,34 +1,23 @@
FROM debian:latest
# Install deps
FROM debian:bookworm-slim
RUN apt update
RUN apt install --yes --no-install-recommends \
# Install required packages
RUN apt install --yes \
bash \
coreutils \
rsync \
libxslt1.1 \
libxml2 \
golang \
xsltproc \
libxml2-utils \
sed \
findutils \
grep \
make \
libc-bin \
wget \
procps \
python3 \
python3-venv \
python3-pip \
git \
node-less \
openssh-client \
expect
# Setup venv
ENV VIRTUAL_ENV=/opt/venv
RUN python3 -m venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
# Copy the requirements
# Done in a separate step for optimal docker caching
COPY ./requirements.txt /website-source/requirements.txt
RUN pip install -r /website-source/requirements.txt
# Copy everything else
COPY . /website-source/
WORKDIR /website-source
ENTRYPOINT [ "bash", "./entrypoint.sh" ]
python3-bs4
WORKDIR /fsfe-websites
ENTRYPOINT ["bash", "./build.sh" ]

Makefile Normal file

@@ -0,0 +1,157 @@
# -----------------------------------------------------------------------------
# Makefile for FSFE website build, phase 1
# -----------------------------------------------------------------------------
# This Makefile is executed in the root of the source directory tree, and
# creates some .xml and xhtml files as well as some symlinks, all of which
# serve as input files in phase 2. The whole phase 1 runs within the source
# directory tree and does not touch the target directory tree at all.
# -----------------------------------------------------------------------------
.PHONY: all .FORCE
.FORCE:
# This will be overwritten in the command line running this Makefile.
build_env = development
languages = none
# -----------------------------------------------------------------------------
# Build search index
# -----------------------------------------------------------------------------
# This step runs a Python tool that creates an index of all news and
# articles. It extracts titles, teasers, tags, dates and potentially more.
# The result will be fed into a JS file.
.PHONY: searchindex
all: searchindex
searchindex:
python3 tools/index-website.py
# -----------------------------------------------------------------------------
# Update CSS files
# -----------------------------------------------------------------------------
# This step recompiles the less files into the final CSS files to be
# distributed to the web server.
ifneq ($(build_env),development)
websites:=$(shell find . -mindepth 2 -maxdepth 2 -type d -regex "./[a-z\.]+\.[a-z]+/look")
all: $(foreach dir,$(websites), $(dir)/fsfe.min.css $(dir)/valentine.min.css)
$(dir $@)%.min.css: $(shell find $(dir $@) -name '*.less')
echo "* Compiling $@"
lessc "$*.less" -x "$@"
endif
# -----------------------------------------------------------------------------
# Update XSL stylesheets
# -----------------------------------------------------------------------------
# This step updates (actually: just touches) all XSL files which depend on
# another XSL file that has changed since the last build run. The phase 2
# Makefile then only has to consider the directly used stylesheet as a
# prerequisite for building each file and doesn't have to worry about other
# stylesheets imported into that one.
# This must run before the "dive into subdirectories" step, because in the news
# and events directories, the XSL files, if updated, will be copied for the
# per-year archives.
.PHONY: stylesheets
all: stylesheets
stylesheets: $(SUBDIRS)
tools/update_stylesheets.sh
# -----------------------------------------------------------------------------
# Dive into subdirectories
# -----------------------------------------------------------------------------
SUBDIRS := $(shell find . -regex "./[a-z\.]+\.[a-z]+/.*/Makefile" | xargs dirname)
all: $(SUBDIRS)
$(SUBDIRS): .FORCE
echo "* Preparing subdirectory $@"
$(MAKE) --silent --directory=$@ languages="$(languages)"
# -----------------------------------------------------------------------------
# Create XML symlinks
# -----------------------------------------------------------------------------
# After this step, the following symlinks will exist:
# * global/data/texts/.texts.<lang>.xml for each language
# * global/data/topbanner/.topbanner.<lang>.xml for each language
# Each of these symlinks will point to the corresponding file without a dot at
# the beginning of the filename, if present, and to the English version
# otherwise. These symlinks make sure that phase 2 can easily use the right file
# for each language, also as a prerequisite in the Makefile.
TEXTS_LINKS := $(foreach lang,$(languages),global/data/texts/.texts.$(lang).xml)
all: $(TEXTS_LINKS)
global/data/texts/.texts.%.xml: .FORCE
if [ -f global/data/texts/texts.$*.xml ]; then \
ln -sf texts.$*.xml $@; \
else \
ln -sf texts.en.xml $@; \
fi
TOPBANNER_LINKS := $(foreach lang,$(languages),global/data/topbanner/.topbanner.$(lang).xml)
all: $(TOPBANNER_LINKS)
global/data/topbanner/.topbanner.%.xml: .FORCE
if [ -f global/data/topbanner/topbanner.$*.xml ]; then \
ln -sf topbanner.$*.xml $@; \
else \
ln -sf topbanner.en.xml $@; \
fi
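# For illustration (hypothetical language list): with languages=fr, if
# texts.fr.xml exists but topbanner.fr.xml does not, the rules above produce:
#   global/data/texts/.texts.fr.xml          -> texts.fr.xml
#   global/data/topbanner/.topbanner.fr.xml  -> topbanner.en.xml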
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# The following steps are handled in an external script, because the list of
# files to generate is not known when the Makefile starts - some new tags might
# be introduced when generating the .xml files in the news/* subdirectories.
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# -----------------------------------------------------------------------------
# Create XSL symlinks
# -----------------------------------------------------------------------------
# After this step, each directory with source files for HTML pages contains a
# symlink named .default.xsl, pointing to the default.xsl "responsible" for
# this directory. These symlinks make it easier for the phase 2 Makefile to
# determine which XSL script should be used to build an HTML page from a source
# file.
.PHONY: default_xsl
all: default_xsl
default_xsl:
tools/update_defaultxsls.sh
# -----------------------------------------------------------------------------
# Update local menus
# -----------------------------------------------------------------------------
# After this step, all .localmenu.??.xml files will be up to date.
.PHONY: localmenus
all: localmenus
localmenus: $(SUBDIRS)
tools/update_localmenus.sh "$(languages)"
# -----------------------------------------------------------------------------
# Update XML filelists
# -----------------------------------------------------------------------------
# After this step, the following files will be up to date:
# * tags/tagged-<tags>.en.xhtml for each tag used. Apart from being
# automatically created, these are regular source files for HTML pages, and
# in phase 2 are built into pages listing all news items and events for a
# tag.
# * tags/.tags.??.xml with a list of the tags used.
# * <dir>/.<base>.xmllist for each <dir>/<base>.sources as well as for each
# tags/tagged-<tags>.en.xhtml. These files are used in phase 2 to include the
# correct XML files when generating the HTML pages. Care is taken that
# these files are only updated when their content actually changes, so
# they can serve as a prerequisite in the phase 2 Makefile.
.PHONY: xmllists
all: xmllists
xmllists: $(SUBDIRS)
tools/update_xmllists.sh "$(languages)"

README.md

@@ -4,19 +4,16 @@ This repository contains the source files of [fsfe.org](https://fsfe.org), pdfre
## Table of Contents
- [Technical information](#technical-information)
- [Structure](#structure)
- [Contribute](#contribute)
- [Translate](#translate)
- [Build](#build)
* [Technical information](#technical-information)
* [Structure](#structure)
* [Contribute](#contribute)
* [Translate](#translate)
* [Build](#build)
## Technical information
Our web team has compiled some information about technology used for this website on the [Information for Webmasters](https://fsfe.org/contribute/web/) page. This is mainly focused on page content.
For information on how the build process works, see the [docs subfolder](./docs/overview.md). For more information on contributing to the build process, please see the [contributor docs](./docs/contributing.md) for some useful tips.
Some tips for management can be found in the [management docs](./docs/management.md).
Our web team has compiled some information about technology used for this website on the [Information for Webmasters](https://fsfe.org/contribute/web/) page.
## Structure
@@ -28,39 +25,38 @@ Every website served using this repo has its own folder with the full domain name
This repository also contains the source files of other websites the FSFE hosts:
- `fsfe.org` for [fsfe.org](http://fsfe.org)
- `activities/android` for [freeyourandroid.org](http://freeyourandroid.org)
- `activities/ilovefs` for [ilovefs.org](http://ilovefs.org)
- `drm.info` for [drm.info](http://drm.info)
- `pdfreaders.org` for [pdfreaders.org](http://pdfreaders.org)
- [test.fsfe.org](https://test.fsfe.org) is fsfe.org built from the test branch of this repository
* `fsfe.org` for [fsfe.org](http://fsfe.org)
* `activities/android` for [freeyourandroid.org](http://freeyourandroid.org)
* `activities/ilovefs` for [ilovefs.org](http://ilovefs.org)
* `drm.info` for [drm.info](http://drm.info)
* `pdfreaders.org` for [pdfreaders.org](http://pdfreaders.org)
* [test.fsfe.org](https://test.fsfe.org) is fsfe.org built from the test branch of this repository
### Important folders
Notable top level directories are:
- `build`: Mostly custom Bash and XSL scripts to build the website
- `global`: Globally used data files and modules, also the static translated strings.
- `tools`: Contains miscellaneous XML, XSL, and SH files.
* `build`: Mostly custom Bash and XSL scripts to build the website
* `global`: Globally used data files and modules, also the static translated strings.
* `tools`: Contains miscellaneous XML, XSL, and SH files.
And of course the different website folders.
And here are some notable directories inside the folder for the main website, fsfe.org.
- `about`: Information about the FSFE itself, its team members etc
- `activities`: All specific FSFE activities
- `at`, `de`, `ee` etc: Folders used for the FSFE country teams
- `cgi-bin`: Our very few CGI scripts
- `error`: Custom 4xx and 5xx error pages
- `events`: Files for our events, ordered by year
- `freesoftware`: More timeless pages explaining Free Software and related topics
- `graphics`: Icons, pictures and logos
- `internal`: Forms used mostly by FSFE staff for internal processes
- `look`: CSS and other style files
- `news`: Files for news articles, press releases, and newsletters ordered by year
- `order`: Our web shop
- `scripts`: JavaScript files used on our pages
- `tags`: Files necessary to display used tags throughout the website. Mostly automatically generated
* `about`: Information about the FSFE itself, its team members etc
* `activities`: All specific FSFE activities
* `at`, `de`, `ee` etc: Folders used for the FSFE country teams
* `cgi-bin`: Our very few CGI scripts
* `error`: Custom 4xx and 5xx error pages
* `events`: Files for our events, ordered by year
* `freesoftware`: More timeless pages explaining Free Software and related topics
* `graphics`: Icons, pictures and logos
* `internal`: Forms used mostly by FSFE staff for internal processes
* `look`: CSS and other style files
* `news`: Files for news articles, press releases, and newsletters ordered by year
* `order`: Our web shop
* `scripts`: JavaScript files used on our pages
* `tags`: Files necessary to display used tags throughout the website. Mostly automatically generated
## Contribute
@@ -76,84 +72,38 @@ You can see the current status of translation progress of fsfe.org at [status.fs
## Build
There are two ways to build and develop the website locally. Initial builds of the webpages may take ~12 minutes, but subsequent builds should be much faster. Using the `--languages` flag to avoid building all supported languages can make this much faster. The `--sites` flag allows for building only some of the sites in this repo, which can also provide a speed boost to the developer experience. Run `./build.py --help` for more information.
There are two ways to build and develop the website locally. Initial builds of the webpages may take ~40 minutes, but subsequent builds should be much faster. Using the `--languages` flag to avoid building all supported languages can make this much faster. See ./build/README.md for more information.
Alterations to build scripts or the files used site-wide will result in near full rebuilds.
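As a sketch of a typical local development loop using the `build.sh` wrapper (flags as documented in `build.sh` and `build/README.md`; the language list is just an example):

```sh
# Build only English and German pages, then serve them on localhost
./build.sh --serve -- --languages en,de

# Force a full rebuild of all webpages
./build.sh --full
```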
### Native
We can either install the required dependencies manually using our preferred package manager. If you are a Nix user, you can run `nix-shell` to enter a shell with the required build dependencies.
We can either install the required dependencies manually using our preferred package manager. If you are a Nix user, you can run `nix-shell` to enter a shell with the required build dependencies, with the Python `virtualenv` already installed and activated.
If installing manually, the required binary names are
The required binary names are
```
python3 pip
realpath rsync xsltproc xmllint sed find egrep grep wc make tee date iconv wget shuf python3
```
Also needed are the libraries
The package names for Debian are
```
libxml2 libxslt
```
Then, we must activate a Python virtual env and install the Python dependencies.
```
python -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
bash bash-completion coreutils diffutils findutils inotify-tools libxml2-utils libxslt make procps python3 rsync
```
After getting the dependencies one way or another we can actually build and serve the pages.
The pages can be built and served by running `./build.py`. Try `--help` for more information. The simple web server used lacks the features of the `apache` server used on the FSFE web servers. This is why, among other differences, no index page is automatically selected for each directory.
The pages can be built and served by running `./build.sh`. Try `--help` for more information. The simple web server used lacks the features of the `apache` server used on the FSFE web servers. This is why, among other differences, no index page is automatically selected for each directory.
### Docker
Simply running `docker compose run --service-ports build --serve` should build the webpages and make them available over localhost.
Some more explanation: we are essentially just using docker as a way to provide dependencies and then running the build script. All flags after `build` are passed to `build.sh`. The `service-ports` flag is required to open ports from the container for serving the output; it is not needed if not using the `--serve` flag of the build script.
The docker build process is in some ways designed for deployment. This means that it expects some environment variables to be set to function. Namely, it will try to load SSH and Git credentials, and docker does not support providing default values for these.
Please note that files generated during the build process using docker are owned by root. This does not cause issues unless you wish to manually alter the output or switch to native building instead of docker.
So, to stub out this functionality, please set the environment variables
`KEY_PRIVATE KEY_PASSWORD GIT_TOKEN` to equal `none` when running docker. One can set them for the shell session; an example in Bash is shown below.
If you wish to switch to native building after using docker, you must use `sudo git clean -fdx` to remove the files generated using docker.
```
export KEY_PRIVATE=none;
export KEY_PASSWORD=none;
export GIT_TOKEN=none;
```
One can then run Docker commands like `docker compose ...`.
Alternatively one can prefix the Docker commands with the required variables, like so
```
KEY_PRIVATE=none KEY_PASSWORD=none GIT_TOKEN=none docker compose
```
Once your preferred method has been chosen, simply running `docker compose run --service-ports build --serve` should build the webpages and make them available over localhost.
Some more explanation: we are essentially just using docker as a way to provide dependencies and then running the build script. All flags after `build` are passed to `build.py`. The `service-ports` flag is required to open ports from the container for serving the output; it is not needed if not using the `--serve` flag of the build script.
## Githooks
The repo contains some highly recommended githooks that one should enable. They check for several kinds of common issues. They are also run in CI, so enabling them locally speeds up the development feedback loop.
One can enable them locally using
```sh
rm ./.git/hooks -r # remove git's sample hooks
ln -s ../tools/githooks/ .git/hooks # link our hooks to the right dir
```
The hooks have some extra dependencies, namely
```
git xmllint sed file grep bash perl mediainfo curl mktemp
```
The provided `nix-shell` includes the needed packages. Otherwise, they can be installed manually.
## Testing
While most small changes can be tested adequately by building locally, some larger changes, particularly ones relating to the order pages, event registration and other forms, may require more integrated testing. This can be achieved using the `test` branch. This branch is built and served in the same way as the main site, [fsfe.org](https://fsfe.org). The built version of the `test` branch may be viewed at [test.fsfe.org](https://test.fsfe.org).
## Status Viewing
The status of builds of [fsfe.org](https://fsfe.org) and [test.fsfe.org](https://test.fsfe.org) can be viewed at [status.fsfe.org](https://status.fsfe.org).

build.py

@@ -1,169 +0,0 @@
#!/usr/bin/env python3
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import argparse
import logging
import multiprocessing
import os
import sys
from pathlib import Path
from build.lib.misc import lang_from_filename
from build.phase0.full import full
from build.phase0.global_symlinks import global_symlinks
from build.phase0.prepare_early_subdirectories import prepare_early_subdirectories
from build.phase1.run import phase1_run
from build.phase2.run import phase2_run
from build.phase3.serve_websites import serve_websites
from build.phase3.stage_to_target import stage_to_target
logger = logging.getLogger(__name__)
def parse_arguments() -> argparse.Namespace:
"""
Parse the arguments of the website build process
"""
parser = argparse.ArgumentParser(
description="Python script to handle building of the fsfe webpage"
)
parser.add_argument(
"--full",
help="Force a full rebuild of all webpages.",
action="store_true",
)
parser.add_argument(
"--languages",
help="Languages to build website in.",
default=[],
type=lambda input: input.split(","),
)
parser.add_argument(
"--log-level",
type=str,
default="INFO",
choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
help="Set the logging level (default: INFO)",
)
parser.add_argument(
"--processes",
help="Number of processes to use when building the website",
type=int,
default=multiprocessing.cpu_count(),
)
parser.add_argument(
"--serve",
help="Serve the webpages after rebuild",
action="store_true",
)
parser.add_argument(
"--sites",
help="What site directories to build",
default=list(filter(lambda path: path.is_dir(), Path().glob("?*.??*"))),
type=lambda input: list(map(lambda site: Path(site), input.split(","))),
)
parser.add_argument(
"--stage",
help="Force the use of an internal staging directory.",
action="store_true",
)
parser.add_argument(
"--target",
help="Final dirs for websites to be build to. Can be a single path, or a comma separated list of valid rsync targets. Supports custom rsynx extension for specifying ports for ssh targets, name@host:path?port.",
type=str,
default="./output/final",
)
args = parser.parse_args()
return args
def main(args: argparse.Namespace):
logging.basicConfig(
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
level=args.log_level,
)
logger.debug(args)
with multiprocessing.Pool(args.processes) as pool:
logger.info("Starting phase 0 - Global Conditional Setup")
# TODO Should also be triggered whenever any build python file is changed
if args.full:
full()
# -----------------------------------------------------------------------------
# Create XML symlinks
# -----------------------------------------------------------------------------
# After this step, the following symlinks will exist:
# * global/data/texts/.texts.<lang>.xml for each language
# * global/data/topbanner/.topbanner.<lang>.xml for each language
# Each of these symlinks will point to the corresponding file without a dot at
# the beginning of the filename, if present, and to the English version
# otherwise. These symlinks make sure that phase 2 can easily use the right file
# for each language
global_symlinks(
args.languages
if args.languages
else list(
map(lambda path: path.name, Path(".").glob("global/languages/??"))
),
pool,
)
# Early subdirs
# For subdir actions that need to be performed very early in the build process. These do not get access to the languages to be built in, or the other benefits of being run later.
prepare_early_subdirectories(
Path(),
args.processes,
)
stage_required = any(
[args.stage, "@" in args.target, ":" in args.target, "," in args.target]
)
working_target = Path("./output/stage" if stage_required else args.target)
# the two middle phases are unconditional, and run on a per site basis
for site in args.sites:
logger.info(f"Processing {site}")
if not site.exists():
logger.critical(f"Site {site} does not exist, exiting")
sys.exit(1)
languages = (
args.languages
if args.languages
else list(
set(
map(
lambda path: lang_from_filename(path),
site.glob("**/*.*.xhtml"),
)
)
)
)
# Processes needed only for subdir stuff
phase1_run(site, languages, args.processes, pool)
phase2_run(site, languages, pool, working_target.joinpath(site))
logger.info("Starting Phase 3 - Global Conditional Finishing")
if stage_required:
stage_to_target(working_target, args.target, pool)
if args.serve:
serve_websites(working_target, 2000, 100)
if __name__ == "__main__":
"""
Main process of the website builder
"""
# Change to the dir the script is in.
os.chdir(os.path.dirname(__file__))
args = parse_arguments()
main(args)

build.sh Executable file

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
set -euo pipefail
usage() {
cat <<-EOF
# build.sh Usage
## General
This script is a wrapper script over ./build/build_main.sh that provides nicer option names, and an option to serve the files.
For documentation on the build script itself see ./build/README.md
## Flags
### -f | --full
Perform a full rebuild of the webpages.
### -s | --serve
Serve the built webpages over localhost.
### --
Everything after this is passed directly to build_main.
See ./build/README.md for valid options.
EOF
exit 1
}
command="build_run"
serve=""
extra_args=""
while [ "$#" -gt 0 ]; do
case "$1" in
--full | -f)
command="build_into" && shift 1
;;
--serve | -s)
serve="true" && shift 1
;;
--)
shift 1
while [ "$#" -gt 0 ]; do
extra_args+="$1 "
shift 1
done
;;
*)
usage
;;
esac
done
mkdir -p ./output
./build/build_main.sh "$command" ./output/final --statusdir ./output/final/status.fsfe.org/fsfe.org/data $extra_args
if [[ "$serve" ]]; then
python3 ./serve-websites.py
fi

build/README.md Normal file

@@ -0,0 +1,42 @@
# Main Commands
Note that targets takes a comma-separated list of valid rsync targets, and hence supports ssh targets. If targeting more than one directory, one must use the --stage-dir flag documented below.
## build_main.sh [options] build_run "targets"
Perform the page build. Write output to targets. The source directory is determined from the build script's own location.
## build_main.sh [options] git_build_into "targets"
Update the repo to the latest version of the upstream branch and then perform a standard build. Write output to targets. The source directory is determined from the build script's own location.
## build_main.sh [options] build_into "targets"
Perform a full rebuild of the webpages, removing all cached files. Write output to targets. The source directory is determined from the build script's own location.
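As a sketch, a hypothetical invocation that builds into a local staging directory and then syncs to two targets (host names and paths are examples only):

```sh
./build/build_main.sh --stage-dir /tmp/stage --status-dir ~/buildstatus \
    build_run "www@host1.example.org:/srv/www,www@host2.example.org:/srv/www"
```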
# Internal Commands
It is unlikely that you will need to call these commands directly, but they are documented here nevertheless.
## build_main.sh [options] build_xmlstream "file.xhtml"
Compile an XML stream from the specified file; additional sources will be determined and included automatically. The stream is suitable for being passed into xsltproc.
## build_main.sh [options] process_file "file.xhtml" [processor.xsl]
Generate output from an xhtml file as if it were processed during the
build. Output is written to STDOUT and can be redirected as desired.
If an XSLT file is not given, it will be chosen automatically.
## build_main.sh [options] tree_maker [input_dir] "targets"
Generate a set of make rules to build the website contained in input_dir. targets should be the www root of a web server. If input_dir is omitted, it will be the source directory determined from the build script's location. Note: if targets is set via previous options, and only one parameter is given, then this parameter will be interpreted as input_dir.
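For example, one could write the generated phase-2 rules to a file for inspection (paths are hypothetical):

```sh
# Emit make rules for building the default source tree into /srv/www
./build/build_main.sh tree_maker /srv/www > /tmp/phase2.mk
```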
# OPTIONS
## --source "source_dir"
Force a specific source directory. If not explicitly given, source_dir is determined from the build script's own location. Paths given in .sources files are interpreted as relative to source_dir, making this option useful when building a webpage outside of the build script's "regular" tree.
## --status-dir "status_dir"
A directory to which messages are written. If no status_dir is provided, information will be written to stdout. The directory will also be used to store some temporary files, which would otherwise be set up in the system-wide temp directory.
## --stage-dir "stage_dir"
Directory used for staging the website builds. The websites are first built into this directory, then copied to each target.
## --build-env "selection"
Indicate the current build environment. "selection" can be one of:

* "fsfe.org": building https://fsfe.org on the production server
* "test.fsfe.org": building https://test.fsfe.org on the production server
* "development" (default): local development build

In a local development build, code to dynamically compile the less files into CSS will be included in the HTML output, while in the other environments, the precompiled fsfe.min.css (or valentine.min.css) will be referenced from the generated web pages.
## --languages "languages"
Takes a comma-separated list of language short codes to build the website in. For example, to build the site in English and French only, one would use `--languages en,fr`. One of the built languages must be English.
## --help
Show this README.


@@ -1,6 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
# __init__.py is a special Python file that allows a directory to become
# a Python package so it can be accessed using the 'import' statement.

build/arguments.sh Executable file

@@ -0,0 +1,94 @@
#!/usr/bin/env bash
[ -z "$inc_misc" ] && . "$basedir/build/misc.sh"
if [ -z "$inc_arguments" ]; then
inc_arguments=true
basedir="$(realpath "${0%/*}/..")"
while [ "$#" -gt 0 ]; do
case "$1" in
-s | --statusdir | --status-dir)
[ "$#" -gt 0 ] && shift 1 && statusdir="$1"
;;
--source)
[ "$#" -gt 0 ] && shift 1 && basedir="$1"
;;
--stage | --stagedir | --stage-dir)
[ "$#" -gt 0 ] && shift 1 && stagedir="$1"
;;
--build-env)
[ "$#" -gt 0 ] && shift 1 && build_env="$1"
;;
--languages)
[ "$#" -gt 0 ] && shift 1 && languages="$1"
;;
-h | --help)
command="help"
;;
build_into)
command="$1$command"
[ "$#" -gt 0 ] && shift 1 && target="$1"
;;
git_build_into)
command="$1$command"
[ "$#" -gt 0 ] && shift 1 && target="$1"
;;
build_run)
command="$1$command"
[ "$#" -gt 0 ] && shift 1 && target="$1"
;;
build_xmlstream)
command="$1$command"
[ "$#" -gt 0 ] && shift 1 && workfile="$1"
;;
tree_maker)
command="$1$command"
[ -n "$target" -o -n "$3" ] && shift 1 && tree="$1"
shift 1
[ -n "$1" ] && target="$1"
;;
process_file)
command="$1$command"
[ "$#" -gt 0 ] && shift 1 && workfile="$1"
[ "$#" -gt 0 ] && shift 1 && processor="$1"
;;
*)
print_error "Unknown option: $1"
exit 1
;;
esac
[ "$#" -gt 0 ] && shift 1
done
tree="${tree:-$basedir}"
stagedir="${stagedir:-$target}"
readonly tree="${tree:+$(realpath "$tree")}"
readonly stagedir="${stagedir:+$(realpath "$stagedir")}"
readonly basedir="${basedir:+$(realpath "$basedir")}"
readonly build_env="${build_env:-development}"
readonly command
if [ "$languages" ]; then
readonly languages="$(echo "$languages" | tr ',' ' ')"
else
readonly languages="$(ls -xw0 "${basedir}/global/languages")"
fi
if [ "$stagedir" != "$target" ] && printf %s "$target" | egrep -q '^.+@.+:(.+)?$'; then
readonly target
else
readonly target="${target:+$(realpath "$target")}"
fi
case "$command" in
build_into) [ -z "$target" ] && die "Missing destination directory" ;;
git_build_into) [ -z "$target" ] && die "Missing destination directory" ;;
build_run) [ -z "$target" ] && die "Missing destination directory" ;;
process_file) [ -z "$workfile" ] && die "Need at least input file" ;;
build_xmlstream) [ -z "$workfile" ] && die "Missing xhtml file name" ;;
tree_maker) [ -z "$target" ] && die "Missing target location" ;;
*help*)
cat "$basedir/build/README.md"
exit 0
;;
*) die "Unrecognised command or no command given" ;;
esac
fi

build/build_main.sh Executable file

@@ -0,0 +1,82 @@
#!/usr/bin/env bash
# Dependency check function
check_dependencies() {
depends="$@"
deperrors=''
for depend in $depends; do
if ! which "$depend" >/dev/null 2>&1; then
deperrors="$depend $deperrors"
fi
done
if [ -n "$deperrors" ]; then
printf '\033[1;31m'
cat <<-EOF
The build script depends on some other programs to function.
Not all of those programs could be located on your system.
Please use your package manager to install the following programs:
EOF
printf '\n\033[0;31m%s\n' "$deperrors"
exit 1
fi 1>&2
}
# Check dependencies for all kinds of build envs (e.g. development, fsfe.org)
check_dependencies realpath rsync xsltproc xmllint sed find egrep grep wc make tee date iconv wget shuf python3
if ! make --version | grep -q "GNU Make 4"; then
echo "The build script requires GNU Make 4.x"
exit 1
fi
basedir="${0%/*}/.."
[ -z "$inc_misc" ] && . "$basedir/build/misc.sh"
readonly start_time="$(date +%s)"
. "$basedir/build/arguments.sh"
# Check special dependencies for (test.)fsfe.org build server
if [ "$build_env" == "fsfe.org" ] || [ "$build_env" == "test.fsfe.org" ]; then
check_dependencies lessc
fi
statusdir="${statusdir/#\~/$HOME}"
if [ -n "$statusdir" ]; then
mkdir -p "$statusdir"
[ ! -w "$statusdir" -o ! -d "$statusdir" ] &&
die "Unable to set up status directory in \"$statusdir\",\n" \
"either select a status directory that exists and is writable,\n" \
"or run the build script without output to a status directory"
fi
readonly statusdir="${statusdir:+$(realpath "$statusdir")}"
buildpids=$(
ps -eo command |
egrep "[s]h ${0} .*" |
wc -l
)
if [ $command = "build_into" -o $command = "git_build_into" ] && [ "$buildpids" -gt 2 ]; then
debug "build script is already running"
exit 1
fi
[ -z "$inc_filenames" ] && . "$basedir/build/filenames.sh"
[ -z "$inc_buildrun" ] && . "$basedir/build/buildrun.sh"
[ -z "$inc_makerules" ] && . "$basedir/build/makerules.sh"
[ -z "$inc_processor" ] && . "$basedir/build/processor.sh"
[ -z "$inc_scaffold" ] && . "$basedir/build/scaffold.sh"
case "$command" in
git_build_into) if [ -f "${statusdir}/full_build" ]; then
debug "discovered flag file, performing full build"
rm "${statusdir}/full_build"
build_into
else
git_build_into
fi ;;
build_into) build_into ;;
build_run) buildrun ;;
process_file) process_file "$workfile" "$processor" ;;
build_xmlstream) build_xmlstream "$(get_shortname "$workfile")" "$(get_language "$workfile")" ;;
tree_maker) tree_maker "$tree" "$target" ;;
esac

build/buildrun.sh Executable file

@@ -0,0 +1,140 @@
#!/usr/bin/env bash
inc_buildrun=true
[ -z "$inc_makerules" ] && . "$basedir/build/makerules.sh"
[ -z "$inc_logging" ] && . "$basedir/build/logging.sh"
[ -z "$inc_misc" ] && . "$basedir/build/misc.sh"
match() {
printf %s "$1" | egrep -q "$2"
}
dir_maker() {
# set up directory tree for output
# optimise by only issuing mkdir commands
# for leaf directories
input="${1%/}"
output="${2%/}"
curpath="$output"
find "$input" -depth -type d \
-regex "$input/[a-z\.]+\.[a-z]+\(/.*\)?" \
-printf '%P\n' |
while read -r filepath; do
oldpath="$curpath"
curpath="$output/$filepath/"
match "$oldpath" "^$curpath" || mkdir -p "$curpath"
done
}
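# Illustration (hypothetical tree): for a source tree whose only leaf directory
# is fsfe.org/news/2024, the loop above issues a single call,
#   mkdir -p "$output/fsfe.org/news/2024"
# and skips the parent directories, which "mkdir -p" creates implicitly.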
# The actual build
buildrun() {
set -o pipefail
printf %s "$start_time" >"$(logname start_time)"
ncpu="$(grep -c ^processor /proc/cpuinfo)"
[ -f "$(logname lasterror)" ] && rm "$(logname lasterror)"
[ -f "$(logname debug)" ] && rm "$(logname debug)"
{
echo "Starting phase 1" &&
make --silent --directory="$basedir" build_env="${build_env}" languages="$languages" 2>&1 &&
echo "Finishing phase 1" ||
die "Error during phase 1"
} | t_logstatus phase_1 || exit 1
dir_maker "$basedir" "$stagedir" || exit 1
forcelog Makefile
{
tree_maker "$basedir" "$stagedir" 2>&1 ||
die "Error during phase 2 Makefile generation"
} >"$(logname Makefile)" || exit 1
{
echo "Starting phase 2" &&
make --silent --jobs=$ncpu --file="$(logname Makefile)" 2>&1 &&
echo "Finishing phase 2" ||
die "Error during phase 2"
} | t_logstatus phase_2 || exit 1
if [ "$stagedir" != "$target" ]; then
# rsync issues a "copying unsafe symlink" message for each of the "unsafe"
# symlinks which we copy while rsyncing. These messages are issued even if
# the files have not changed and clutter up the output, so we filter them
# out.
{
for destination in ${target//,/ }; do
echo "Syncing files to $(echo "$destination" | grep -Po "(?<=@)[^:]+")"
rsync -av --copy-unsafe-links --del --exclude "status.fsfe.org/*fsfe.org/data" "$stagedir/" "$destination/" | grep -v "copying unsafe symlink"
done
} | t_logstatus stagesync
fi
date +%s >"$(logname end_time)"
if [ -n "$statusdir" ]; then
(
cd "$statusdir"/..
./index.cgi | tail -n+3 >"$statusdir"/status_$(date +%s).html
)
fi
}
# Update git (try 3x) and then do an actual build
git_build_into() {
forcelog GITchanges
GITchanges="$(logname GITchanges)"
forcelog GITerrors
GITerrors="$(logname GITerrors)"
gitterm=1
i=0
while [[ ($gitterm -ne 0) && ($i -lt 3) ]]; do
((i++))
git -C "$basedir" pull >"$GITchanges" 2>"$GITerrors"
gitterm="$?"
if [ $gitterm -ne 0 ]; then
debug "Git pull unsuccessful. Trying again in a few seconds."
sleep $(shuf -i 10-30 -n1)
fi
done
if [ "$gitterm" -ne 0 ]; then
debug "Three git pulls failed, hard resetting and repulling"
git -C "$basedir" reset --hard HEAD~50 >"$GITchanges" 2>"$GITerrors"
git -C "$basedir" pull >>"$GITchanges" 2>>"$GITerrors"
gitterm="$?"
fi
if [ "$gitterm" -ne 0 ]; then
die "GIT reported the following problem:\n$(cat "$GITerrors")"
fi
if egrep '^Already up[ -]to[ -]date' "$GITchanges"; then
debug "No changes to GIT:\n$(cat "$GITchanges")"
# Exit status should only be 0 if there was a successful build.
# So set it to 1 here.
exit 1
fi
logstatus GITlatest <"$GITchanges"
buildrun
}
# Clean up everything and then do an actual (full) build
build_into() {
# Clean up source directory.
git -C "${basedir}" clean -dxf --exclude=status.fsfe.org/translations/data
# Remove old stage directory.
rm -rf "${stagedir}"
buildrun
}

build/filenames.sh Executable file

@@ -0,0 +1,16 @@
#!/usr/bin/env bash
inc_filenames=true
get_language() {
# extract language indicator from a given file name
echo "$(echo "$1" | sed -r 's:^.*\.([a-z]{2})\.[^\.]+$:\1:')"
}
get_shortname() {
# get shortened version of a given file name
# required for internal processing
#echo "$(echo "$1" | sed -r 's:\.[a-z]{2}.xhtml$::')";
echo "${1%.??.xhtml}"
}
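# Usage sketch (hypothetical file name following the <name>.<lang>.xhtml scheme):
#   get_language  "news/2024/news.de.xhtml"   # -> de
#   get_shortname "news/2024/news.de.xhtml"   # -> news/2024/news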


@@ -1,6 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
# __init__.py is a special Python file that allows a directory to become
# a Python package so it can be accessed using the 'import' statement.


@@ -1,128 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import subprocess
import sys
from pathlib import Path
import lxml.etree as etree
logger = logging.getLogger(__name__)
def keys_exists(element: dict, *keys: str) -> bool:
"""
Check if *keys (nested) exists in `element` (dict).
"""
if not isinstance(element, dict):
raise AttributeError("keys_exists() expects dict as first argument.")
if len(keys) == 0:
raise AttributeError("keys_exists() expects at least two arguments, one given.")
_element = element
for key in keys:
try:
_element = _element[key]
except KeyError:
return False
return True
def sort_dict(dict: dict) -> dict:
"""
Sort dict by keys
"""
return {key: val for key, val in sorted(dict.items(), key=lambda ele: ele[0])}
def update_if_changed(path: Path, content: str) -> None:
"""
Compare the content of the file at path with the content.
If the file does not exist,
or its contents do not match content,
write content to the file.
"""
if not path.exists() or path.read_text() != content:
logger.debug(f"Updating {path}")
path.write_text(content)
def touch_if_newer_dep(file: Path, deps: list[Path]) -> None:
"""
Takes a filepath, and a list of paths of its dependencies.
If any of the dependencies has been altered more recently than the file,
touch the file.
Essentially a simple reimplementation of make deps for build targets.
"""
if any(dep.stat().st_mtime > file.stat().st_mtime for dep in deps):
logger.info(f"Touching {file}")
file.touch()
def delete_file(file: Path) -> None:
"""
Delete given file using pathlib
"""
logger.debug(f"Removing file {file}")
file.unlink()
def lang_from_filename(file: Path) -> str:
"""
Get the lang code from a file, where the filename is of format
<name>.XX.<ending>, with xx being the lang code.
"""
lang = file.with_suffix("").suffix.removeprefix(".")
# Lang codes should be the ISO 639 two-letter codes, but sometimes we use "nolang" to stop a file being built
if len(lang) != 2 and lang != "nolang":
logger.critical(
f"Language {lang} from file {file} not of correct length, exiting"
)
sys.exit(1)
else:
return lang
def run_command(commands: list) -> str:
result = subprocess.run(
commands,
capture_output=True,
# Get output as str instead of bytes
universal_newlines=True,
)
if result.returncode != 0:
logger.critical(f"Command {commands} failed with error")
logger.critical(result.stderr.strip())
sys.exit(1)
return result.stdout.strip()
def get_version(file: Path) -> int:
"""
Get the version tag of an xhtml|xml file
"""
xslt_tree = etree.parse(Path("build/xslt/get_version.xsl"))
transform = etree.XSLT(xslt_tree)
result = transform(etree.parse(file))
result = str(result).strip()
if result == "":
result = str(0)
logger.debug(f"Got version: {result}")
return int(result)
def get_basepath(file: Path) -> Path:
"""
Return the file with the last two suffixes removed
"""
return file.with_suffix("").with_suffix("")
def get_basename(file: Path) -> str:
"""
Return the name of the file with the last two suffixes removed
"""
return file.with_suffix("").with_suffix("").name


@@ -1,246 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import re
from datetime import datetime
from pathlib import Path
import lxml.etree as etree
from build.lib.misc import get_basename, get_version, lang_from_filename
logger = logging.getLogger(__name__)
def _include_xml(file: Path) -> str:
"""
include second level elements of a given XML file
this emulates the behaviour of the original
build script which wasn't able to load top
level elements from any file
"""
work_str = ""
if file.exists():
tree = etree.parse(file)
root = tree.getroot()
# Remove <version> because the filename attribute would otherwise be added
# to this element instead of the actual content element.
for elem in root.xpath("version"):
root.remove(elem)
# Iterate over all elements in root node, add a filename attribute and then append the string to work_str
for elem in root.xpath("*"):
elem.set("filename", get_basename(file))
work_str += etree.tostring(elem, encoding="utf-8").decode("utf-8")
return work_str
def _get_attributes(file: Path) -> str:
"""
get attributes of top level element in a given
XHTML file
"""
work_str = ""
tree = etree.parse(file)
root = tree.getroot()
attributes = root.attrib
for attrib in attributes:
work_str += f'{attrib}="{attributes[attrib]}"\n'
return work_str
def _list_langs(file: Path) -> str:
"""
list all languages a file exists in by globbing up
the shortname (i.e. file path with file ending omitted)
output is readily formatted for inclusion
in xml stream
"""
return "\n".join(
list(
map(
lambda path: (
f'<tr id="{lang_from_filename(path)}">'
+ (
Path(f"global/languages/{lang_from_filename(path)}")
.read_text()
.strip()
)
+ "</tr>"
),
file.parent.glob(f"{get_basename(file)}.??{file.suffix}"),
)
)
)
def _auto_sources(action_file: Path, lang: str) -> str:
"""
import elements from source files, add file name
attribute to first element included from each file
"""
work_str = ""
list_file = action_file.with_stem(
f".{action_file.with_suffix('').stem}"
).with_suffix(".xmllist")
if list_file.exists():
with list_file.open("r") as file:
for path in map(lambda line: Path(line.strip()), file):
path_xml = (
path.with_suffix(f".{lang}.xml")
if path.with_suffix(f".{lang}.xml").exists()
else path.with_suffix(".en.xml")
)
work_str += _include_xml(path_xml)
return work_str
def _build_xmlstream(infile: Path):
"""
assemble the xml stream for feeding into xsltproc
the expected shortname and language flag indicate
a single xhtml page to be built
"""
# TODO
# Ideally this would use lxml to construct an object instead of string templating.
# Should be a little faster, and also guarantees that it's valid XML
logger.debug(f"infile: {infile}")
shortname = infile.with_suffix("")
lang = lang_from_filename(infile)
glob = infile.parent.joinpath(f"{get_basename(infile)}.??{infile.suffix}")
logger.debug(f"formed glob: {glob}")
lang_lst = list(
infile.parent.glob(f"{get_basename(infile)}.??{infile.suffix}"),
)
logger.debug(f"file lang list: {lang_lst}")
original_lang = (
"en"
if infile.with_suffix("").with_suffix(f".en{infile.suffix}").exists()
else sorted(
infile.parent.glob(f"{get_basename(infile)}.??{infile.suffix}"),
key=get_version,
reverse=True,
)[0]
.with_suffix("")
.suffix.removeprefix(".")
)
topbanner_xml = Path(f"global/data/topbanner/.topbanner.{lang}.xml")
texts_xml = Path(f"global/data/texts/.texts.{lang}.xml")
date = str(datetime.now().date())
# time = str(datetime.now().time())
action_lang = ""
translation_state = ""
if infile.exists():
action_lang = lang
original_version = get_version(
shortname.with_suffix(f".{original_lang}{infile.suffix}")
)
lang_version = get_version(shortname.with_suffix(f".{lang}{infile.suffix}"))
translation_state = (
"up-to-date"
if (original_version <= lang_version)
else (
"very-outdated"
if (original_version - 3 >= lang_version)
else "outdated"
)
)
else:
action_lang = original_lang
translation_state = "untranslated"
action_file = shortname.with_suffix(f".{action_lang}{infile.suffix}")
logger.debug(f"action_file: {action_file}")
result_str = f"""
<buildinfo
date="{date}"
original="{original_lang}"
filename="/{str(shortname.with_suffix("")).removeprefix("/")}"
fileurl="/{shortname.relative_to(shortname.parts[0]).with_suffix("")}"
dirname="/{shortname.parent}/"
language="{lang}"
translation_state="{translation_state}"
>
<trlist>
{_list_langs(infile)}
</trlist>
<topbanner>
{_include_xml(topbanner_xml)}
</topbanner>
<textsetbackup>
{_include_xml(Path("global/data/texts/texts.en.xml"))}
</textsetbackup>
<textset>
{_include_xml(texts_xml)}
</textset>
<document
language="{action_lang}"
{_get_attributes(action_file)}
>
<set>
{_auto_sources(action_file, lang)}
</set>
{_include_xml(action_file)}
</document>
</buildinfo>
"""
return result_str
def process_file(infile: Path, processor: Path) -> str:
"""
Process a given file using the correct xsl sheet
"""
logger.debug(f"Processing {infile}")
lang = lang_from_filename(infile)
xmlstream = _build_xmlstream(infile)
xslt_tree = etree.parse(processor.resolve())
transform = etree.XSLT(xslt_tree)
result = str(transform(etree.XML(xmlstream)))
# And now a bunch of regexes to fix some links.
# xx is the language code in all comments
# TODO
# Probably a faster way to do this
# Maybe iterating through all a tags with lxml?
# Once buildxmlstream generates an xml object that should be faster.
# Remove https://fsfe.org (or https://test.fsfe.org) from the start of all links
result = re.sub(
r"""href\s*=\s*("|')(https?://(test\.)?fsfe\.org)([^>])\1""",
r"""href=\1\3\1""",
result,
flags=re.MULTILINE | re.IGNORECASE,
)
# Change links from /foo/bar.html into /foo/bar.xx.html
# Change links from foo/bar.html into foo/bar.xx.html
# Same for .rss and .ics links
result = re.sub(
r"""href\s*=\s*("|')(/?([^:>]+/)?[^:/.]+\.)(html|rss|ics)(#[^>]*)?\1""",
rf"""href=\1\2{lang}.\4\5\1""",
result,
flags=re.MULTILINE | re.IGNORECASE,
)
# Change links from /foo/bar/ into /foo/bar/index.xx.html
# Change links from foo/bar/ into foo/bar/index.xx.html
result = re.sub(
r"""href\s*=\s*("|')(/?[^:>]+/)\1""",
rf"""href=\1\2index.{lang}.html\1""",
result,
flags=re.MULTILINE | re.IGNORECASE,
)
return result
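# Illustration of the combined effect of the substitutions above, assuming
# the page language is "de" (hypothetical hrefs):
#   href="/contact/contact.html"  ->  href="/contact/contact.de.html"
#   href="events/events.rss"      ->  href="events/events.de.rss"
#   href="/about/"                ->  href="/about/index.de.html"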

build/logging.sh Executable file

@@ -0,0 +1,43 @@
#!/usr/bin/env bash
inc_logging=true
logname() {
name="$1"
if [ -w "$statusdir" ] && touch "$statusdir/$name"; then
echo "$statusdir/$name"
elif echo "$forcedlog" | egrep -q "^${name}=.+"; then
echo "$forcedlog" |
sed -rn "s;^${name}=;;p"
else
echo /dev/null
fi
}
forcelog() {
name="$1"
[ "$(logname "$name")" = "/dev/null" ] &&
forcedlog="$forcedlog\n${name}=$(mktemp -t w3bldXXXXXXXXX --suffix $$)"
}
[ -z "$USER" ] && USER="$(whoami)"
trap "trap - 0 2 3 6 9 15; find \"${TMPDIR:-/tmp}/\" -maxdepth 1 -user \"$USER\" -name \"w3bld*$$\" -delete" 0 2 3 6 9 15
logstatus() {
# pipeline atom to write data streams into a log file
tee "$(logname "$1")"
}
t_logstatus() {
# pipeline atom to write timestamped data streams into a log file
while read line; do
printf "[$(date +%T)] %s\n" "$line"
done | logstatus "$@"
}
logappend() {
# pipeline atom to append data streams to a log file
tee -a "$(logname "$1")"
}
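# Usage sketch, as in buildrun.sh: timestamp each line of a pipeline and write
# it to "$statusdir/phase_1" (or to /dev/null when no writable status directory
# or forced log exists):
#   { echo "Starting phase 1" && make --silent; } | t_logstatus phase_1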

build/makerules.sh Executable file

@@ -0,0 +1,235 @@
#!/usr/bin/env bash
inc_makerules=true
tree_maker() {
# walk through file tree and issue Make rules according to file type
input="$(realpath "$1")"
output="$(realpath "$2")"
cat <<EOF
# -----------------------------------------------------------------------------
# Makefile for FSFE website build, phase 2
# -----------------------------------------------------------------------------
.PHONY: all
.DELETE_ON_ERROR:
.SECONDEXPANSION:
PROCESSOR = "$basedir/build/process_file.sh"
PROCFLAGS = --build-env "${build_env}" --source "$basedir"
INPUTDIR = $input
OUTPUTDIR = $output
STATUSDIR = $statusdir
LANGUAGES = $languages
# -----------------------------------------------------------------------------
# Build .html files from .xhtml sources
# -----------------------------------------------------------------------------
# All .xhtml source files
HTML_SRC_FILES := \$(shell find "\$(INPUTDIR)" \
-name '*.??.xhtml' \
-not -path '\$(INPUTDIR)/.git/*' \
)
# All basenames of .xhtml source files (without .<lang>.xhtml ending)
# Note: \$(sort ...) is used to remove duplicates
HTML_SRC_BASES := \$(sort \$(basename \$(basename \$(HTML_SRC_FILES))))
# All directories containing .xhtml source files
HTML_SRC_DIRS := \$(sort \$(dir \$(HTML_SRC_BASES)))
# The same as above, but moved to the output directory
HTML_DST_BASES := \$(patsubst \$(INPUTDIR)/%,\$(OUTPUTDIR)/%,\$(HTML_SRC_BASES))
# List of .<lang>.html files to build
HTML_DST_FILES := \$(foreach base,\$(HTML_DST_BASES),\$(foreach lang,\$(LANGUAGES),\$(base).\$(lang).html))
# .xmllist file used to build a html file
XMLLIST_DEP = \$(wildcard \$(INPUTDIR)/\$(dir \$*).\$(notdir \$*).xmllist)
# .xsl file used to build a html file
XSL_DEP = \$(firstword \$(wildcard \$(INPUTDIR)/\$*.xsl) \$(INPUTDIR)/\$(dir \$*).default.xsl)
all: \$(HTML_DST_FILES)
EOF
for lang in ${languages}; do
cat <<EOF
\$(filter %.${lang}.html,\$(HTML_DST_FILES)): \$(OUTPUTDIR)/%.${lang}.html: \$(INPUTDIR)/%.*.xhtml \$\$(XMLLIST_DEP) \$\$(XSL_DEP) \$(INPUTDIR)/global/data/texts/.texts.${lang}.xml \$(INPUTDIR)/global/data/texts/texts.en.xml \$(INPUTDIR)/global/data/topbanner/.topbanner.${lang}.xml
echo "* Building \$*.${lang}.html"
\${PROCESSOR} \${PROCFLAGS} process_file "\$(INPUTDIR)/\$*.${lang}.xhtml" > "\$@"
EOF
done
cat <<EOF
# -----------------------------------------------------------------------------
# Create index.* symlinks
# -----------------------------------------------------------------------------
# All .xhtml source files with the same name as their parent directory
INDEX_SRC_FILES := \$(wildcard \$(foreach directory,\$(HTML_SRC_DIRS),\$(directory)\$(notdir \$(directory:/=)).??.xhtml))
# All basenames of .xhtml source files with the same name as their parent
# directory
INDEX_SRC_BASES := \$(sort \$(basename \$(basename \$(INDEX_SRC_FILES))))
# All directories containing .xhtml source files with the same name as their
# parent directory (that is, all directories in which index files should be
# created)
INDEX_SRC_DIRS := \$(dir \$(INDEX_SRC_BASES))
# The same as above, but moved to the output directory
INDEX_DST_DIRS := \$(patsubst \$(INPUTDIR)/%,\$(OUTPUTDIR)/%,\$(INDEX_SRC_DIRS))
# List of index.<lang>.html symlinks to create
INDEX_DST_LINKS := \$(foreach base,\$(INDEX_DST_DIRS),\$(foreach lang,\$(LANGUAGES),\$(base)index.\$(lang).html))
all: \$(INDEX_DST_LINKS)
EOF
for lang in ${languages}; do
cat <<EOF
\$(filter %/index.${lang}.html,\$(INDEX_DST_LINKS)): \$(OUTPUTDIR)/%/index.${lang}.html:
echo "* Creating symlink \$*/index.${lang}.html"
ln -sf "\$(notdir \$*).${lang}.html" "\$@"
EOF
done
cat <<EOF
# -----------------------------------------------------------------------------
# Create symlinks from file.<lang>.html to file.html.<lang>
# -----------------------------------------------------------------------------
# List of .html.<lang> symlinks to create
HTML_DST_LINKS := \$(foreach base,\$(HTML_DST_BASES) \$(addsuffix index,\$(INDEX_DST_DIRS)),\$(foreach lang,\$(LANGUAGES),\$(base).html.\$(lang)))
all: \$(HTML_DST_LINKS)
EOF
for lang in ${languages}; do
cat <<EOF
\$(OUTPUTDIR)/%.html.${lang}:
echo "* Creating symlink \$*.html.${lang}"
ln -sf "\$(notdir \$*).${lang}.html" "\$@"
EOF
done
cat <<EOF
# -----------------------------------------------------------------------------
# Build .rss files from .xhtml sources
# -----------------------------------------------------------------------------
# All .rss.xsl scripts which can create .rss output
RSS_SRC_SCRIPTS := \$(shell find "\$(INPUTDIR)" \
-name '*.rss.xsl' \
-not -path '\$(INPUTDIR)/.git/*' \
)
# All basenames of .xhtml source files from which .rss files should be built
RSS_SRC_BASES := \$(sort \$(basename \$(basename \$(RSS_SRC_SCRIPTS))))
# The same as above, but moved to the output directory
RSS_DST_BASES := \$(patsubst \$(INPUTDIR)/%,\$(OUTPUTDIR)/%,\$(RSS_SRC_BASES))
# List of .<lang>.rss files to build
RSS_DST_FILES := \$(foreach base,\$(RSS_DST_BASES),\$(foreach lang,\$(LANGUAGES),\$(base).\$(lang).rss))
all: \$(RSS_DST_FILES)
EOF
for lang in ${languages}; do
cat <<EOF
\$(OUTPUTDIR)/%.${lang}.rss: \$(INPUTDIR)/%.*.xhtml \$\$(XMLLIST_DEP) \$(INPUTDIR)/%.rss.xsl \$(INPUTDIR)/global/data/texts/.texts.${lang}.xml \$(INPUTDIR)/global/data/texts/texts.en.xml
echo "* Building \$*.${lang}.rss"
\${PROCESSOR} \${PROCFLAGS} process_file "\$(INPUTDIR)/\$*.${lang}.xhtml" "\$(INPUTDIR)/\$*.rss.xsl" > "\$@"
EOF
done
cat <<EOF
# -----------------------------------------------------------------------------
# Build .ics files from .xhtml sources
# -----------------------------------------------------------------------------
# All .ics.xsl scripts which can create .ics output
ICS_SRC_SCRIPTS := \$(shell find "\$(INPUTDIR)" \
-name '*.ics.xsl' \
-not -path '\$(INPUTDIR)/.git/*' \
)
# All basenames of .xhtml source files from which .ics files should be built
ICS_SRC_BASES := \$(sort \$(basename \$(basename \$(ICS_SRC_SCRIPTS))))
# The same as above, but moved to the output directory
ICS_DST_BASES := \$(patsubst \$(INPUTDIR)/%,\$(OUTPUTDIR)/%,\$(ICS_SRC_BASES))
# List of .<lang>.ics files to build
ICS_DST_FILES := \$(foreach base,\$(ICS_DST_BASES),\$(foreach lang,\$(LANGUAGES),\$(base).\$(lang).ics))
all: \$(ICS_DST_FILES)
EOF
for lang in ${languages}; do
cat <<EOF
\$(OUTPUTDIR)/%.${lang}.ics: \$(INPUTDIR)/%.*.xhtml \$\$(XMLLIST_DEP) \$(INPUTDIR)/%.ics.xsl \$(INPUTDIR)/global/data/texts/.texts.${lang}.xml \$(INPUTDIR)/global/data/texts/texts.en.xml
echo "* Building \$*.${lang}.ics"
\${PROCESSOR} \${PROCFLAGS} process_file "\$(INPUTDIR)/\$*.${lang}.xhtml" "\$(INPUTDIR)/\$*.ics.xsl" > "\$@"
EOF
done
cat <<EOF
# -----------------------------------------------------------------------------
# Copy images, documents etc
# -----------------------------------------------------------------------------
# All files which should just be copied over
COPY_SRC_FILES := \$(shell find -L "\$(INPUTDIR)" -type f \
-regex "\$(INPUTDIR)/[a-z\.]+\.[a-z]+/.*" \
-not -name '.drone.yml' \
-not -name '.gitignore' \
-not -name 'README*' \
-not -name 'Makefile' \
-not -name '*.sources' \
-not -name "*.xmllist" \
-not -name '*.xhtml' \
-not -name '*.xml' \
-not -name '*.xsl' \
-not -name '*.nix' \
) \$(INPUTDIR)/fsfe.org/order/data/items.en.xml
# The same as above, but moved to the output directory
COPY_DST_FILES := \$(sort \$(patsubst \$(INPUTDIR)/%,\$(OUTPUTDIR)/%,\$(COPY_SRC_FILES)))
all: \$(COPY_DST_FILES)
\$(COPY_DST_FILES): \$(OUTPUTDIR)/%: \$(INPUTDIR)/%
echo "* Copying file \$*"
rsync -l "\$<" "\$@"
# -----------------------------------------------------------------------------
# Clean up excess files in target directory
# -----------------------------------------------------------------------------
ALL_DST := \$(HTML_DST_FILES) \$(INDEX_DST_LINKS) \$(HTML_DST_LINKS) \$(RSS_DST_FILES) \$(ICS_DST_FILES) \$(COPY_DST_FILES) \$(SOURCE_DST_FILES)
.PHONY: clean
all: clean
clean:
# Write all destination filenames into "manifest" file, one per line
\$(file >\$(STATUSDIR)/manifest)
\$(foreach filename,\$(ALL_DST),\$(file >>\$(STATUSDIR)/manifest,\$(filename)))
sort "\$(STATUSDIR)/manifest" > "\$(STATUSDIR)/manifest.sorted"
find -L "\$(OUTPUTDIR)" -type f -path "\$(STATUSDIR)" -prune \\
| sort \\
| diff - "\$(STATUSDIR)/manifest.sorted" \\
| sed -rn 's;^< ;;p' \\
| while read file; do echo "* Deleting \$\${file}"; rm "\$\${file}"; done
# -----------------------------------------------------------------------------
EOF
}

build/misc.sh Executable file

@ -0,0 +1,23 @@
#!/usr/bin/env bash
inc_misc=true
[ -z "$inc_logging" ] && . "$basedir/build/logging.sh"
debug() {
if [ "$#" -ge 1 ]; then
echo "$(date '+%F %T'): $@" | logappend debug >&2
else
logappend debug >&2
fi
}
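# print_error: record the error message in the lasterror log and point the
# user at the usage instructions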
print_error() {
echo "Error - $@" | logappend lasterror >&2
echo "Run '$0 --help' to see usage instructions" >&2
}
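# die: log a fatal error, record the build end time, and abort with exit code 1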
die() {
echo "$(date '+%F %T'): Fatal - $@" | logappend lasterror >&2
date +%s | logstatus end_time
exit 1
}


@ -1,20 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
from build.lib.misc import run_command
logger = logging.getLogger(__name__)
def full() -> None:
"""
Git clean the repo to remove all cached artifacts
Excludes the root .venv directory, as removing it mid-build obviously breaks the build
"""
logger.info("Performing a full rebuild, git cleaning")
run_command(
["git", "clean", "-fdx", "--exclude", "/.venv"],
)


@ -1,39 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
from itertools import product
from pathlib import Path
logger = logging.getLogger(__name__)
def _do_symlinking(type: str, lang: str) -> None:
"""
Helper function, suitable for multithreading, that does the global symlinking for one type and language
"""
target = (
Path(f"global/data/{type}/{type}.{lang}.xml")
if Path(f"global/data/{type}/{type}.{lang}.xml").exists()
else Path(f"global/data/{type}/{type}.en.xml")
)
source = Path(f"global/data/{type}/.{type}.{lang}.xml")
if not source.exists():
source.symlink_to(target.relative_to(source.parent))
def global_symlinks(languages: list[str], pool: multiprocessing.Pool) -> None:
"""
After this step, the following symlinks will exist:
* global/data/texts/.texts.<lang>.xml for each language
* global/data/topbanner/.topbanner.<lang>.xml for each language
Each of these symlinks will point to the corresponding file without a dot at
the beginning of the filename, if present, and to the English version
otherwise. These symlinks make sure that phase 2 can easily use the right file
for each language, also as a prerequisite in the Makefile.
"""
logger.info("Creating global symlinks")
types = ["texts", "topbanner"]
pool.starmap(_do_symlinking, product(types, languages))


@ -1,30 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import sys
from pathlib import Path
logger = logging.getLogger(__name__)
def prepare_early_subdirectories(source_dir: Path, processes: int) -> None:
"""
Find any early subdir scripts in subdirectories and run them
"""
logger.info("Preparing Early Subdirectories")
for subdir_path in map(
lambda path: path.parent, source_dir.glob("**/early_subdir.py")
):
logger.info(f"Preparing early subdirectory {subdir_path}")
sys.path.append(str(subdir_path.resolve()))
import early_subdir
early_subdir.run(processes, subdir_path)
# Remove its path from where things can be imported
sys.path.remove(str(subdir_path.resolve()))
# Remove it from loaded modules
sys.modules.pop("early_subdir")
# prevent us from accessing it again
del early_subdir


@ -1,6 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
# __init__.py is a special Python file that allows a directory to become
# a Python package so it can be accessed using the 'import' statement.


@ -1,120 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
# Build an index for the search engine based on the article titles and tags
import json
import logging
import multiprocessing
from pathlib import Path
import iso639
import lxml.etree as etree
import nltk
from nltk.corpus import stopwords as nltk_stopwords
from build.lib.misc import update_if_changed
logger = logging.getLogger(__name__)
def _find_teaser(document: etree.ElementTree) -> str:
"""
Find a suitable teaser for indexation
Get all the paragraphs in <body> and return the first which contains more
than 10 words
:document: The parsed lxml ElementTree document
:returns: The text of the teaser or an empty string
"""
for p in document.xpath("//body//p"):
if p.text and len(p.text.strip().split(" ")) > 10:
return p.text
return ""
def _process_file(file: Path, stopwords: set[str]) -> dict:
"""
Generate the search index entry for a given file and set of stopwords
"""
logger.debug(f"Processing file {file}")
xslt_root = etree.parse(file)
tags = map(
lambda tag: tag.get("key"),
filter(lambda tag: tag.get("key") != "front-page", xslt_root.xpath("//tag")),
)
return {
"url": f"/{file.with_suffix('.html').relative_to(file.parents[-2])}",
"tags": " ".join(tags),
"title": (
xslt_root.xpath("//html//title")[0].text
if xslt_root.xpath("//html//title")
else ""
),
"teaser": " ".join(
w
for w in _find_teaser(xslt_root).strip().split(" ")
if w.lower() not in stopwords
),
"type": "news" if "news/" in str(file) else "page",
# Get the date of the file if it has one
"date": (
xslt_root.xpath("//news[@newsdate]").get("newsdate")
if xslt_root.xpath("//news[@newsdate]")
else None
),
}
def index_websites(
source_dir: Path, languages: list[str], pool: multiprocessing.Pool
) -> None:
"""
Generate a search index for all sites that have a search/search.js file
"""
logger.info("Creating search indexes")
# Download all stopwords
nltkdir = "./.nltk_data"
nltk.data.path = [nltkdir] + nltk.data.path
nltk.download("stopwords", download_dir=nltkdir, quiet=True)
# Iterate over sites
if source_dir.joinpath("search/search.js").exists():
logger.debug(f"Indexing {source_dir}")
# Get all xhtml files in languages to be processed
# Create a list of tuples
# The first element of each tuple is the file and the second is a set of stopwords for that language
# Use iso639 to get the English name of the language from the two-letter ISO 639-1 code we use to mark files.
# Then if that language has stopwords from nltk, use those stopwords.
files_with_stopwords = map(
lambda file: (
file,
(
set(
nltk_stopwords.words(
iso639.Language.from_part1(
file.suffixes[0].removeprefix(".")
).name.lower()
)
)
if iso639.Language.from_part1(
file.suffixes[0].removeprefix(".")
).name.lower()
in nltk_stopwords.fileids()
else set()
),
),
filter(
lambda file: file.suffixes[0].removeprefix(".") in languages,
source_dir.glob("**/*.??.xhtml"),
),
)
articles = pool.starmap(_process_file, files_with_stopwords)
update_if_changed(
source_dir.joinpath("search/index.js"),
"var pages = " + json.dumps(articles, ensure_ascii=False),
)


@ -1,30 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import sys
from pathlib import Path
logger = logging.getLogger(__name__)
def prepare_subdirectories(
source_dir: Path, languages: list[str], processes: int
) -> None:
"""
Find any subdir scripts in subdirectories and run them
"""
logger.info("Preparing Subdirectories")
for subdir_path in map(lambda path: path.parent, source_dir.glob("**/subdir.py")):
logger.info(f"Preparing subdirectory {subdir_path}")
sys.path.append(str(subdir_path.resolve()))
import subdir
subdir.run(languages, processes, subdir_path)
# Remove its path from where things can be imported
sys.path.remove(str(subdir_path.resolve()))
# Remove it from loaded modules
sys.modules.pop("subdir")
# prevent us from accessing it again
del subdir


@ -1,116 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
# -----------------------------------------------------------------------------
# script for FSFE website build, phase 1
# -----------------------------------------------------------------------------
# This script is executed in the root of the source directory tree, and
# creates some .xml and xhtml files as well as some symlinks, all of which
# serve as input files in phase 2. The whole phase 1 runs within the source
# directory tree and does not touch the target directory tree at all.
# -----------------------------------------------------------------------------
import logging
import multiprocessing
from pathlib import Path
from .index_website import index_websites
from .prepare_subdirectories import prepare_subdirectories
from .update_css import update_css
from .update_defaultxsls import update_defaultxsls
from .update_localmenus import update_localmenus
from .update_stylesheets import update_stylesheets
from .update_tags import update_tags
from .update_xmllists import update_xmllists
logger = logging.getLogger(__name__)
def phase1_run(
source_dir: Path,
languages: list[str] | None,
processes: int,
pool: multiprocessing.Pool,
):
"""
Run all the necessary sub functions for phase1.
"""
logger.info("Starting Phase 1 - Setup")
# -----------------------------------------------------------------------------
# Build search index
# -----------------------------------------------------------------------------
# This step runs a Python tool that creates an index of all news and
# articles. It extracts titles, teaser, tags, dates and potentially more.
# The result will be fed into a JS file.
index_websites(source_dir, languages, pool)
# -----------------------------------------------------------------------------
# Update CSS files
# -----------------------------------------------------------------------------
# This step recompiles the less files into the final CSS files to be
# distributed to the web server.
update_css(
source_dir,
)
# -----------------------------------------------------------------------------
# Update XSL stylesheets
# -----------------------------------------------------------------------------
# This step updates (actually: just touches) all XSL files which depend on
# another XSL file that has changed since the last build run. The phase 2
# Makefile then only has to consider the directly used stylesheet as a
# prerequisite for building each file and doesn't have to worry about other
# stylesheets imported into that one.
# This must run before the "dive into subdirectories" step, because in the news
# and events directories, the XSL files, if updated, will be copied for the
# per-year archives.
update_stylesheets(source_dir, pool)
# -----------------------------------------------------------------------------
# Dive into subdirectories
# -----------------------------------------------------------------------------
# Find any makefiles in subdirectories and run them
prepare_subdirectories(source_dir, languages, processes)
# -----------------------------------------------------------------------------
# Create XSL symlinks
# -----------------------------------------------------------------------------
# After this step, each directory with source files for HTML pages contains a
# symlink named .default.xsl and pointing to the default.xsl "responsible" for
# this directory. These symlinks make it easier for the phase 2 Makefile to
# determine which XSL script should be used to build a HTML page from a source
# file.
update_defaultxsls(source_dir, pool)
# -----------------------------------------------------------------------------
# Update local menus
# -----------------------------------------------------------------------------
# After this step, all .localmenu.??.xml files will be up to date.
update_localmenus(source_dir, languages, pool)
# -----------------------------------------------------------------------------
# Update tags
# -----------------------------------------------------------------------------
# After this step, the following files will be up to date:
# * tags/tagged-<tags>.en.xhtml for each tag used. Apart from being
# automatically created, these are regular source files for HTML pages, and
# in phase 2 are built into pages listing all news items and events for a
# tag.
# * tags/.tags.??.xml with a list of the tags used.
update_tags(source_dir, languages, pool)
# -----------------------------------------------------------------------------
# Update XML filelists
# -----------------------------------------------------------------------------
# After this step, the following files will be up to date:
# * <dir>/.<base>.xmllist for each <dir>/<base>.sources as well as for each
# $site/tags/tagged-<tags>.en.xhtml. These files are used in phase 2 to include the
# correct XML files when generating the HTML pages. It is taken care that
# these files are only updated whenever their content actually changes, so
# they can serve as a prerequisite in the phase 2 Makefile.
update_xmllists(source_dir, languages, pool)


@ -1,47 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
from pathlib import Path
import minify
from build.lib.misc import run_command, update_if_changed
logger = logging.getLogger(__name__)
def update_css(
source_dir: Path,
) -> None:
"""
If any less files have been changed, update the css.
Compile the less found at website/look/(fsfe.less|valentine.less),
then minify it and place it in the expected location for the build process.
"""
logger.info("Updating css")
dir = source_dir.joinpath("look")
if dir.exists():
for name in ["fsfe", "valentine"]:
if dir.joinpath(name + ".less").exists() and (
not dir.joinpath(name + ".min.css").exists()
or any(
map(
lambda path: path.stat().st_mtime
> dir.joinpath(name + ".min.css").stat().st_mtime,
dir.glob("**/*.less"),
)
)
):
logger.info(f"Compiling {name}.less")
result = run_command(
[
"lessc",
str(dir.joinpath(name + ".less")),
],
)
update_if_changed(
dir.joinpath(name + ".min.css"),
minify.string("text/css", result),
)


@ -1,39 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
from pathlib import Path
logger = logging.getLogger(__name__)
def _do_symlinking(directory: Path) -> None:
"""
In each dir, place a .default.xsl symlink pointing to the nearest default.xsl
"""
working_dir = directory
if not directory.joinpath(".default.xsl").exists():
while not working_dir.joinpath("default.xsl").exists():
working_dir = working_dir.parent
directory.joinpath(".default.xsl").symlink_to(
working_dir.joinpath("default.xsl").resolve()
)
def update_defaultxsls(source_dir: Path, pool: multiprocessing.Pool) -> None:
"""
Place a .default.xsl into each directory containing source files for
HTML pages (*.xhtml). These .default.xsl are symlinks to the first
available actual default.xsl found when climbing the directory tree
upwards; it is the xsl stylesheet to be used for building the HTML
files from this directory.
"""
logger.info("Updating default xsl's")
# Get a set of all directories containing .xhtml source files
directories = set(map(lambda path: path.parent, source_dir.glob("**/*.*.xhtml")))
# Do all directories asynchronously
pool.map(_do_symlinking, directories)


@ -1,123 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
from pathlib import Path
import lxml.etree as etree
from build.lib.misc import get_basepath, update_if_changed
logger = logging.getLogger(__name__)
def _write_localmenus(
dir: str, files_by_dir: dict[str, list[Path]], languages: list[str]
) -> None:
"""
Write localmenus for a given directory
"""
# Set of files with no langcode or xhtml extension
base_files = set(
map(
lambda filter_file: get_basepath(filter_file),
files_by_dir[dir],
)
)
for lang in languages:
file = Path(dir).joinpath(f".localmenu.{lang}.xml")
logger.debug(f"Creating {file}")
page = etree.Element("feed")
# Add the subelements
version = etree.SubElement(page, "version")
version.text = "1"
for source_file in filter(
lambda path: path is not None,
map(
lambda base_file: base_file.with_suffix(f".{lang}.xhtml")
if base_file.with_suffix(f".{lang}.xhtml").exists()
else (
base_file.with_suffix(".en.xhtml")
if base_file.with_suffix(".en.xhtml").exists()
else None
),
base_files,
),
):
for localmenu in etree.parse(source_file).xpath("//localmenu"):
etree.SubElement(
page,
"localmenuitem",
set=(
str(localmenu.xpath("./@set")[0])
if localmenu.xpath("./@set") != []
else "default"
),
id=(
str(localmenu.xpath("./@id")[0])
if localmenu.xpath("./@id") != []
else "default"
),
link=(
str(
source_file.with_suffix(".html").relative_to(
source_file.parents[0]
)
)
),
).text = localmenu.text
update_if_changed(
file,
etree.tostring(page, encoding="utf-8").decode("utf-8"),
)
def update_localmenus(
source_dir: Path, languages: list[str], pool: multiprocessing.Pool
) -> None:
"""
Update all the .localmenu.*.xml files containing the local menus.
"""
logger.info("Updating local menus")
# Get a dict of all source files containing local menus
files_by_dir = {}
for file in filter(
lambda path: "-template" not in str(path),
source_dir.glob("**/*.??.xhtml"),
):
xslt_root = etree.parse(file)
if xslt_root.xpath("//localmenu"):
dir = xslt_root.xpath("//localmenu/@dir")
dir = dir[0] if dir else str(file.parent.relative_to(Path(".")))
if dir not in files_by_dir:
files_by_dir[dir] = set()
files_by_dir[dir].add(file)
for dir in files_by_dir:
files_by_dir[dir] = sorted(list(files_by_dir[dir]))
# If any of the source files have been updated, rebuild all .localmenu.*.xml
dirs = filter(
lambda dir: (
any(
map(
lambda file: (
(not Path(dir).joinpath(".localmenu.en.xml").exists())
or (
file.stat().st_mtime
> Path(dir).joinpath(".localmenu.en.xml").stat().st_mtime
)
),
files_by_dir[dir],
)
)
),
files_by_dir,
)
pool.starmap(
_write_localmenus, map(lambda dir: (dir, files_by_dir, languages), dirs)
)


@ -1,49 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
import re
from pathlib import Path
from lxml import etree
from build.lib.misc import touch_if_newer_dep
logger = logging.getLogger(__name__)
def _update_sheet(file: Path) -> None:
"""
Touch a given xsl file if any of the xsl files it imports have been updated
"""
xslt_root = etree.parse(file)
imports = map(
lambda imp: file.parent.joinpath(imp.get("href"))
.resolve()
.relative_to(Path(".").resolve()),
xslt_root.xpath(
"//xsl:import", namespaces={"xsl": "http://www.w3.org/1999/XSL/Transform"}
),
)
touch_if_newer_dep(file, imports)
def update_stylesheets(source_dir: Path, pool: multiprocessing.Pool) -> None:
"""
This script is called from the phase 1 Makefile and touches all XSL files
which depend on another XSL file that has changed since the last build run.
The phase 2 Makefile then only has to consider the
directly used stylesheet as a prerequisite for building each file and doesn't
have to worry about other stylesheets imported into that one.
"""
logger.info("Updating XSL stylesheets")
banned = re.compile(r"(\.venv/.*)|(.*\.default\.xsl$)")
pool.map(
_update_sheet,
filter(
lambda file: re.match(banned, str(file)) is None,
source_dir.glob("**/*.xsl"),
),
)


@ -1,178 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
from pathlib import Path
from xml.sax.saxutils import escape
import lxml.etree as etree
from build.lib.misc import (
get_basepath,
keys_exists,
lang_from_filename,
sort_dict,
update_if_changed,
)
logger = logging.getLogger(__name__)
def _update_tag_pages(site: Path, tag: str, languages: list[str]) -> None:
"""
Update the xhtml pages and xmllists for a given tag
"""
for lang in languages:
tagfile_source = site.joinpath(f"tags/tagged.{lang}.xhtml")
if tagfile_source.exists():
taggedfile = site.joinpath(f"tags/tagged-{tag}.{lang}.xhtml")
content = tagfile_source.read_text().replace("XXX_TAGNAME_XXX", tag)
update_if_changed(taggedfile, content)
def _update_tag_sets(
site: Path,
lang: str,
filecount: dict[str, dict[str, int]],
files_by_tag: dict[str, list[Path]],
tags_by_lang: dict[str, dict[str, str]],
) -> None:
"""
Update the .tags.??.xml tagset xmls for a given language
"""
# Add our toplevel element
page = etree.Element("tagset")
# Add the subelements
version = etree.SubElement(page, "version")
version.text = "1"
for section in ["news", "events"]:
for tag in files_by_tag:
count = filecount[section][tag]
label = (
tags_by_lang[lang][tag]
if keys_exists(tags_by_lang, lang, tag)
and tags_by_lang[lang][tag] is not None
else tags_by_lang["en"][tag]
if keys_exists(tags_by_lang, "en", tag)
and tags_by_lang["en"][tag] is not None
else tag
)
if count > 0:
etree.SubElement(
page, "tag", section=section, key=tag, count=str(count)
).text = label
update_if_changed(
site.joinpath(f"tags/.tags.{lang}.xml"),
etree.tostring(page, encoding="utf-8").decode("utf-8"),
)
def update_tags(
source_dir: Path, languages: list[str], pool: multiprocessing.Pool
) -> None:
"""
Update Tag pages, xmllists and xmls
Creates/updates the following files:
* */tags/tagged-<tags>.en.xhtml for each tag used. Apart from being
automatically created, these are regular source files for HTML pages, and
in phase 2 are built into pages listing all news items and events for a
tag.
* */tags/.tags.??.xml with a list of the tags used.
Changing or removing tags in XML files is also considered, in which case a
file is removed from the .xmllist files.
When a tag has been removed from the last XML file where it has been used,
the tagged-* files are correctly deleted.
"""
if source_dir.joinpath("tags").exists():
logger.info(f"Updating tags for {source_dir}")
# Create a complete and current map of which tag is used in which files
files_by_tag = {}
tags_by_lang = {}
# Fill out files_by_tag and tags_by_lang
for file in filter(
lambda file:
# Not in tags dir of a source_dir
source_dir.joinpath("tags") not in file.parents,
source_dir.glob("**/*.xml"),
):
for tag in etree.parse(file).xpath("//tag"):
# Get the key attribute, and filter out some invalid chars
key = (
tag.get("key")
.replace("/", "-")
.replace(" ", "-")
.replace(":", "-")
.strip()
)
# Get the label, and strip it.
label = (
escape(tag.text.strip()) if tag.text and tag.text.strip() else None
)
# Load into the dicts
if key not in files_by_tag:
files_by_tag[key] = set()
files_by_tag[key].add(get_basepath(file))
lang = lang_from_filename(file)
if lang not in tags_by_lang:
tags_by_lang[lang] = {}
tags_by_lang[lang][key] = (
tags_by_lang[lang][key]
if key in tags_by_lang[lang] and tags_by_lang[lang][key]
else label
)
# Sort dicts to ensure that they are stable between runs
files_by_tag = sort_dict(files_by_tag)
for tag in files_by_tag:
files_by_tag[tag] = sorted(files_by_tag[tag])
tags_by_lang = sort_dict(tags_by_lang)
for lang in tags_by_lang:
tags_by_lang[lang] = sort_dict(tags_by_lang[lang])
logger.debug("Updating tag pages")
pool.starmap(
_update_tag_pages,
map(lambda tag: (source_dir, tag, languages), files_by_tag.keys()),
)
logger.debug("Updating tag lists")
pool.starmap(
update_if_changed,
map(
lambda tag: (
Path(f"{source_dir}/tags/.tagged-{tag}.xmllist"),
("\n".join(map(lambda file: str(file), files_by_tag[tag])) + "\n"),
),
files_by_tag.keys(),
),
)
logger.debug("Updating tag sets")
# Get count of files with each tag in each section
filecount = {}
for section in ["news", "events"]:
filecount[section] = {}
for tag in files_by_tag:
filecount[section][tag] = len(
list(
filter(
lambda path: section in str(path.parent),
files_by_tag[tag],
)
)
)
pool.starmap(
_update_tag_sets,
map(
lambda lang: (source_dir, lang, filecount, files_by_tag, tags_by_lang),
filter(lambda lang: lang in languages, tags_by_lang.keys()),
),
)


@ -1,175 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import datetime
import fnmatch
import logging
import multiprocessing
import re
from pathlib import Path
import lxml.etree as etree
from build.lib.misc import (
get_basepath,
lang_from_filename,
touch_if_newer_dep,
update_if_changed,
)
logger = logging.getLogger(__name__)
def _update_for_base(
base: Path, all_xml: set[Path], nextyear: str, thisyear: str, lastyear: str
) -> None:
"""
Update the xmllist for a given base file
"""
matching_files = set()
# If sources exist
if base.with_suffix(".sources").exists():
# Load every file that matches the pattern
# If a tag is included in the pattern, the file must contain that tag
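# A line looks like, e.g., 'news/*/news-$thisyear*:[front-page]' (hypothetical
# example): a glob pattern, optionally followed by :[tag] to require that tag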
with base.with_suffix(".sources").open(mode="r") as file:
for line in file:
pattern = (
re.sub(r"(\*)?:\[.*\]$", "*", line)
.replace("$nextyear", nextyear)
.replace("$thisyear", thisyear)
.replace("$lastyear", lastyear)
.strip()
)
if len(pattern) <= 0:
logger.debug("Pattern too short, continue!")
continue
tag = (
re.search(r":\[(.*)\]", line).group(1).strip()
if re.search(r":\[(.*)\]", line) is not None
else ""
)
for xml_file in filter(
lambda xml_file:
# Matches glob pattern
fnmatch.fnmatchcase(str(xml_file), pattern)
# contains tag if tag in pattern
and (
any(
map(
lambda xml_file_with_ending: etree.parse(
xml_file_with_ending
).find(f".//tag[@key='{tag}']")
is not None,
xml_file.parent.glob(f"{xml_file.name}.*.xml"),
)
)
if tag != ""
else True
)
# Not just matching an empty xml_file
and len(str(xml_file)) > 0,
all_xml,
):
matching_files.add(str(xml_file))
for file in Path("").glob(f"{base}.??.xhtml"):
xslt_root = etree.parse(file)
for module in xslt_root.xpath("//module"):
matching_files.add(f"global/data/modules/{module.get('id')}".strip())
matching_files = sorted(matching_files)
update_if_changed(
Path(f"{base.parent}/.{base.name}.xmllist"),
("\n".join(matching_files) + "\n") if matching_files else "",
)
def _update_module_xmllists(
source_dir: Path, languages: list[str], pool: multiprocessing.Pool
) -> None:
"""
Update .xmllist files for .sources and .xhtml containing <module>s
"""
logger.info("Updating XML lists")
# Get all the bases and stuff before multithreading the update bit
all_xml = set(
map(
lambda path: get_basepath(path),
filter(
lambda path: lang_from_filename(path) in languages,
list(source_dir.glob("**/*.*.xml"))
+ list(Path("global/").glob("**/*.*.xml")),
),
)
)
source_bases = set(
map(
lambda path: path.with_suffix(""),
source_dir.glob("**/*.sources"),
)
)
module_bases = set(
map(
lambda path: get_basepath(path),
filter(
lambda path: lang_from_filename(path) in languages
and etree.parse(path).xpath("//module"),
source_dir.glob("**/*.*.xhtml"),
),
)
)
all_bases = source_bases | module_bases
nextyear = str(datetime.datetime.today().year + 1)
thisyear = str(datetime.datetime.today().year)
lastyear = str(datetime.datetime.today().year - 1)
pool.starmap(
_update_for_base,
map(lambda base: (base, all_xml, nextyear, thisyear, lastyear), all_bases),
)
def _check_xmllist_deps(file: Path) -> None:
"""
If any of the sources in an xmllist are newer than it, touch the xmllist
"""
xmls = set()
with file.open(mode="r") as fileobj:
for line in fileobj:
for newfile in Path("").glob(line.strip() + ".??.xml"):
xmls.add(newfile)
touch_if_newer_dep(file, list(xmls))
def _touch_xmllists_with_updated_deps(
source_dir: Path, languages: list[str], pool: multiprocessing.Pool
) -> None:
"""
Touch all .xmllist files where one of the contained files has changed
"""
logger.info("Checking contents of XML lists")
pool.map(_check_xmllist_deps, source_dir.glob("**/.*.xmllist"))
def update_xmllists(
source_dir: Path, languages: list[str], pool: multiprocessing.Pool
) -> None:
"""
Update XML filelists (*.xmllist)
Creates/updates the following files:
* <dir>/.<base>.xmllist for each <dir>/<base>.sources as well as for each
fsfe.org/tags/tagged-<tags>.en.xhtml. These files are used in phase 2 to include the
correct XML files when generating the HTML pages. Care is taken that
these files are only updated whenever their content actually changes, so
they can serve as a prerequisite in the phase 2 Makefile.
Changing or removing tags in XML files is also considered, in which case a
file is removed from the .xmllist files.
When a tag has been removed from the last XML file where it has been used,
the tagged-* files are correctly deleted.
"""
_update_module_xmllists(source_dir, languages, pool)
_touch_xmllists_with_updated_deps(source_dir, languages, pool)


@ -1,6 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
# __init__.py is a special Python file that allows a directory to become
# a Python package so it can be accessed using the 'import' statement.


@ -1,59 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
import shutil
from pathlib import Path
logger = logging.getLogger(__name__)
def _copy_file(target: Path, source_dir: Path, source_file: Path) -> None:
target_file = target.joinpath(source_file.relative_to(source_dir))
if (
not target_file.exists()
or source_file.stat().st_mtime > target_file.stat().st_mtime
):
logger.debug(f"Copying {source_file} to {target_file}")
target_file.parent.mkdir(parents=True, exist_ok=True)
target_file.write_bytes(source_file.read_bytes())
# preserve file modes
shutil.copymode(source_file, target_file)
def copy_files(source_dir: Path, pool: multiprocessing.Pool, target: Path) -> None:
"""
Copy images, documents etc
"""
logger.info("Copying over media and misc files")
pool.starmap(
_copy_file,
map(
lambda file: (target, source_dir, file),
list(
filter(
lambda path: path.is_file()
and path.suffix
not in [
".md",
".yml",
".gitignore",
".sources",
".xmllist",
".xhtml",
".xsl",
".xml",
".less",
".py",
".pyc",
]
and path.name not in ["Makefile"],
source_dir.glob("**/*"),
)
)
# Special case: hard-coded pass over the order items xml required by the cgi script
+ list(source_dir.glob("order/data/items.en.xml")),
),
)


@ -1,35 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
from pathlib import Path
from build.lib.misc import get_basename
logger = logging.getLogger(__name__)
def _do_symlinking(target: Path) -> None:
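# For a target like about/about.en.html, derive the index name about/index.en.html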
source = target.parent.joinpath(
f"index{target.with_suffix('').suffix}{target.suffix}"
)
if not source.exists():
source.symlink_to(target.relative_to(source.parent))
def create_index_symlinks(
source_dir: Path, pool: multiprocessing.Pool, target: Path
) -> None:
"""
Create index.* symlinks
"""
logger.info("Creating index symlinks")
pool.map(
_do_symlinking,
filter(
lambda path: get_basename(path) == path.parent.name,
target.glob("**/*.??.html"),
),
)


@ -1,28 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
from pathlib import Path
logger = logging.getLogger(__name__)
def _do_symlinking(target: Path) -> None:
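# For a target like file.en.html, derive the MultiViews name file.html.en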
source = target.with_suffix("").with_suffix(f".html{target.with_suffix('').suffix}")
if not source.exists():
source.symlink_to(target.relative_to(source.parent))
def create_language_symlinks(
source_dir: Path, pool: multiprocessing.Pool, target: Path
) -> None:
"""
Create symlinks from file.<lang>.html to file.html.<lang>
"""
logger.info("Creating language symlinks")
pool.map(
_do_symlinking,
target.glob("**/*.??.html"),
)


@ -1,112 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
from pathlib import Path
from build.lib.misc import get_basepath
from build.lib.process_file import process_file
logger = logging.getLogger(__name__)
def _run_process(
target_file: Path, processor: Path, source_file: Path, basename: Path, lang: str
):
# if the target file does not exist, we make it
if not target_file.exists() or any(
# If any source file is newer than the file to be generated,
# we recreate the generated file.
# If a source file does not exist, ignore it.
map(
lambda file: (
file.exists() and file.stat().st_mtime > target_file.stat().st_mtime
),
[
(
source_file
if source_file.exists()
else basename.with_suffix(".en.xhtml")
),
processor,
(
source_file.parent.joinpath("." + basename.name).with_suffix(
".xmllist"
)
),
Path(f"global/data/texts/.texts.{lang}.xml"),
Path(f"global/data/topbanner/.topbanner.{lang}.xml"),
Path("global/data/texts/texts.en.xml"),
],
)
):
logger.debug(f"Building {target_file}")
result = process_file(source_file, processor)
target_file.parent.mkdir(parents=True, exist_ok=True)
target_file.write_text(result)
def _process_dir(
source_dir: Path, languages: list[str], target: Path, dir: Path
) -> None:
for basename in set(map(lambda path: path.with_suffix(""), dir.glob("*.??.xhtml"))):
for lang in languages:
source_file = basename.with_suffix(f".{lang}.xhtml")
target_file = target.joinpath(
source_file.relative_to(source_dir)
).with_suffix(".html")
processor = (
basename.with_suffix(".xsl")
if basename.with_suffix(".xsl").exists()
else basename.parent.joinpath(".default.xsl")
)
_run_process(target_file, processor, source_file, basename, lang)
def _process_stylesheet(
source_dir: Path, languages: list[str], target: Path, processor: Path
) -> None:
basename = get_basepath(processor)
destination_base = target.joinpath(basename.relative_to(source_dir))
for lang in languages:
target_file = destination_base.with_suffix(
f".{lang}{processor.with_suffix('').suffix}"
)
source_file = basename.with_suffix(f".{lang}.xhtml")
_run_process(target_file, processor, source_file, basename, lang)
def process_files(
source_dir: Path, languages: list[str], pool: multiprocessing.Pool, target: Path
) -> None:
"""
Build .html, .rss and .ics files from .xhtml sources
"""
# TODO: for performance it would be better to iterate by processor xsl, parse each one only once, and pass the xsl object to the called function.
logger.info("Processing xhtml files")
pool.starmap(
_process_dir,
map(
lambda dir: (source_dir, languages, target, dir),
set(map(lambda path: path.parent, source_dir.glob("**/*.*.xhtml"))),
),
)
logger.info("Processing rss files")
pool.starmap(
_process_stylesheet,
map(
lambda processor: (source_dir, languages, target, processor),
source_dir.glob("**/*.rss.xsl"),
),
)
logger.info("Processing ics files")
pool.starmap(
_process_stylesheet,
map(
lambda processor: (source_dir, languages, target, processor),
source_dir.glob("**/*.ics.xsl"),
),
)


@ -1,30 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
# -----------------------------------------------------------------------------
# script for FSFE website build, phase 2
# -----------------------------------------------------------------------------
import logging
import multiprocessing
from pathlib import Path
from .copy_files import copy_files
from .create_index_symlinks import create_index_symlinks
from .create_language_symlinks import create_language_symlinks
from .process_files import process_files
logger = logging.getLogger(__name__)
def phase2_run(
source_dir: Path, languages: list[str], pool: multiprocessing.Pool, target: Path
):
"""
Run all the necessary sub functions for phase2.
"""
logger.info("Starting Phase 2 - Generating output")
process_files(source_dir, languages, pool, target)
create_index_symlinks(source_dir, pool, target)
create_language_symlinks(source_dir, pool, target)
copy_files(source_dir, pool, target)


@ -1,6 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
# __init__.py is a special Python file that allows a directory to become
# a Python package so it can be accessed using the 'import' statement.


@ -1,39 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import http.server
import logging
import multiprocessing
import os
import socketserver
from pathlib import Path
logger = logging.getLogger(__name__)
def _run_webserver(path: str, port: int) -> None:
"""
Given a path as a string and a port, it will
serve that directory on localhost:port forever.
"""
os.chdir(path)
Handler = http.server.CGIHTTPRequestHandler
with socketserver.TCPServer(("", port), Handler) as httpd:
httpd.serve_forever()
def serve_websites(serve_dir: str, base_port: int, increment_number: int) -> None:
"""
Takes a target directory, a base port, and a per-directory port increment.
It then serves all directories over http on localhost.
"""
dirs = sorted(list(filter(lambda path: path.is_dir(), Path(serve_dir).iterdir())))
serves = []
for dir in dirs:
port = base_port + (increment_number * dirs.index(dir))
logging.info(f"{dir.name} served at http://127.0.0.1:{port}")
serves.append((str(dir), port))
with multiprocessing.Pool(len(serves)) as pool:
pool.starmap(_run_webserver, serves)


@ -1,49 +0,0 @@
# SPDX-FileCopyrightText: Free Software Foundation Europe e.V. <https://fsfe.org>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import multiprocessing
from pathlib import Path
from build.lib.misc import run_command
logger = logging.getLogger(__name__)
def _rsync(stagedir: Path, target: str, port: int) -> None:
run_command(
[
"rsync",
"-av",
"--copy-unsafe-links",
"--del",
str(stagedir) + "/",
target,
]
# Use ssh options so that it does not worry about fingerprints, as every connection is effectively a new one.
# Also specify the ssh port; these options are only added for remote targets.
+ (
["-e", f"ssh -o StrictHostKeyChecking=accept-new -p {port}"]
if ":" in target
else []
),
)
def stage_to_target(stagedir: Path, targets: str, pool: multiprocessing.Pool) -> None:
"""
Use a multithreaded rsync to copy the stage dir to all targets.
"""
logger.info("Rsyncing from stage dir to target dir(s)")
pool.starmap(
_rsync,
map(
lambda target: (
stagedir,
(target if "?" not in target else target.split("?")[0]),
(int(target.split("?")[1]) if "?" in target else 22),
),
targets.split(","),
),
)

build/process_file.sh Executable file

@ -0,0 +1,11 @@
#!/usr/bin/env bash
basedir="${0%/*}/.."
[ -z "$inc_processor" ] && . "$basedir/build/processor.sh"
. "$basedir/build/arguments.sh"
case "$command" in
process_file) process_file "$workfile" "$processor" ;;
*) die "Unrecognised command or no command given" ;;
esac

build/processor.sh Executable file

@ -0,0 +1,56 @@
#!/usr/bin/env bash
inc_processor=true
[ -z "$inc_filenames" ] && . "$basedir/build/filenames.sh"
[ -z "$inc_scaffold" ] && . "$basedir/build/scaffold.sh"
process_file() {
infile="$1"
processor="$2"
shortname=$(get_shortname "$infile")
lang=$(get_language "$infile")
if [ -z "${processor}" ]; then
if [ -f "${shortname}.xsl" ]; then
processor="${shortname}.xsl"
else
# Actually use the symlink target, so the relative includes are searched
# in the correct directory.
processor="$(realpath "${shortname%/*}/.default.xsl")"
fi
fi
# Make sure that the following pipe exits with a nonzero exit code if *any*
# of the commands fails.
set -o pipefail
# The sed command of death below does the following:
# 1. Remove https://fsfe.org (or https://test.fsfe.org) from the start of all
# links
# 2. Change links from /foo/bar.html into /foo/bar.xx.html
# 3. Change links from foo/bar.html into foo/bar.xx.html
# 4. Same for .rss and .ics links
# 5. Change links from /foo/bar/ into /foo/bar/index.xx.html
# 6. Change links from foo/bar/ into foo/bar/index.xx.html
# ... where xx is the language code.
# Everything is duplicated to allow for the href attribute to be enclosed in
# single or double quotes.
# I am strongly convinced that there must be a less obfuscated way of doing
# this. --Reinhard
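# For example (illustrative, with lang=de): href="/about/mission.html" becomes
# href="/about/mission.de.html", and href="/press/" becomes
# href="/press/index.de.html".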
build_xmlstream "$shortname" "$lang" |
xsltproc --stringparam "build-env" "${build_env}" "$processor" - |
sed -r ':X; N; $!bX;
s;<[\r\n\t ]*(a|link)([\r\n\t ][^>]*)?[\r\n\t ]href="((https?:)?//[^"]*)";<\1\2 href="#== norewrite ==\3";gI
s;<[\r\n\t ]*(a|link)([\r\n\t ][^>]*)?[\r\n\t ]href="([^#"])([^"]*/)?([^\./"]*\.)(html|rss|ics)(#[^"]*)?";<\1\2 href="\3\4\5'"$lang"'.\6\7";gI
s;<[\r\n\t ]*(a|link)([\r\n\t ][^>]*)?[\r\n\t ]href="([^#"]*/)(#[^"]*)?";<\1\2 href="\3index.'"$lang"'.html\4";gI
s;<[\r\n\t ]*(a|link)([\r\n\t ][^>]*)?[\r\n\t ]href="#== norewrite ==((https?:)?//[^"]*)";<\1\2 href="\3";gI
s;<[\r\n\t ]*(a|link)([\r\n\t ][^>]*)?[\r\n\t ]href='\''((https?:)?//[^'\'']*)'\'';<\1\2 href='\''#== norewrite ==\3'\'';gI
s;<[\r\n\t ]*(a|link)([\r\n\t ][^>]*)?[\r\n\t ]href='\''([^#'\''])([^'\'']*/)?([^\./'\'']*\.)(html|rss|ics)(#[^'\'']*)?'\'';<\1\2 href='\''\3\4\5'"$lang"'.\6\7'\'';gI
s;<[\r\n\t ]*(a|link)([\r\n\t ][^>]*)?[\r\n\t ]href='\''([^#'\'']*/)(#[^'\'']*)?'\'';<\1\2 href='\''\3index.'"$lang"'.html\4'\'';gI
s;<[\r\n\t ]*(a|link)([\r\n\t ][^>]*)?[\r\n\t ]href='\''#== norewrite ==((https?:)?//[^'\'']*)'\'';<\1\2 href='\''\3'\'';gI
'
}

build/scaffold.sh Executable file

@ -0,0 +1,125 @@
#!/usr/bin/env bash
inc_scaffold=true
get_version() {
version=$(xsltproc $basedir/build/xslt/get_version.xsl $1)
echo ${version:-0}
}
include_xml() {
# include second level elements of a given XML file
# this emulates the behaviour of the original
# build script which wasn't able to load top
# level elements from any file
if [ -f "$1" ]; then
# Remove <version> because the filename attribute would otherwise be added
# to this element instead of the actual content element.
sed 's;<version>.*</version>;;' "$1" |
sed -r ':X; $bY; N; bX; :Y;
s:<(\?[xX][mM][lL]|!DOCTYPE)[[:space:]]+[^>]+>::g
s:<[^!][^>]*>::;
s:</[^>]*>([^<]*((<[^>]+/>|<!([^>]|<[^>]*>)*>|<\?[^>]+>)[^<]*)*)?$:\1:;'
fi
}
get_attributes() {
# get attributes of top level element in a given
# XHTML file
sed -rn ':X; N; $!bX;
s;^.*<[\n\t\r ]*([xX]|[xX]?[hH][tT])[mM][lL][\n\t\r ]+([^>]*)>.*$;\2;p' "$1"
}
list_langs() {
# list all languages a file exists in by globbing up
# the shortname (i.e. file path with file ending omitted)
# output is readily formatted for inclusion
# in xml stream
for file in "${1}".[a-z][a-z].xhtml; do
language="${file: -8:2}"
text="$(echo -n $(cat "${basedir}/global/languages/${language}"))"
echo "<tr id=\"${language}\">${text}</tr>"
done
}
auto_sources() {
# import elements from source files, add file name
# attribute to first element included from each file
shortname="$1"
lang="$2"
list_file="$(dirname ${shortname})/.$(basename ${shortname}).xmllist"
if [ -f "${list_file}" ]; then
cat "${list_file}" | while read path; do
base="$(basename ${path})"
if [ -f "${basedir}/${path}.${lang}.xml" ]; then
printf '\n### filename="%s" ###\n%s' "${base#.}" "$(include_xml "${basedir}/${path}.${lang}.xml")"
elif [ -f "${basedir}/${path}.en.xml" ]; then
printf '\n### filename="%s" ###\n%s' "${base#.}" "$(include_xml "${basedir}/${path}.en.xml")"
fi
done |
sed -r ':X; N; $!bX;
s;\n### (filename="[^\n"]+") ###\n[^<]*(<![^>]+>[^<]*)*(<([^/>]+/)*([^/>]+))(/?>);\2\3 \1\6;g;'
fi
}
build_xmlstream() {
# assemble the xml stream for feeding into xsltproc
# the expected shortname and language flag indicate
# a single xhtml page to be built
shortname="$1"
lang="$2"
olang="$(echo "${shortname}".[a-z][a-z].xhtml "${shortname}".[e]n.xhtml | sed -rn 's;^.*\.([a-z]{2})\.xhtml.*$;\1;p')"
dirname="${shortname%/*}/"
topbanner_xml="$basedir/global/data/topbanner/.topbanner.${lang}.xml"
texts_xml="$basedir/global/data/texts/.texts.${lang}.xml"
date="$(date +%Y-%m-%d)"
time="$(date +%H:%M:%S)"
if [ -f "${shortname}.${lang}.xhtml" ]; then
act_lang="$lang"
translation_state="up-to-date"
[ $(get_version "${shortname}.${olang}.xhtml") -gt $(get_version "${shortname}.${lang}.xhtml") ] && translation_state="outdated"
[ $(($(get_version "${shortname}.${olang}.xhtml") - 3)) -gt $(get_version "${shortname}.${lang}.xhtml") ] && act_lang="$olang" && translation_state="very-outdated"
else
act_lang="$olang"
translation_state="untranslated"
fi
infile="${shortname}.${act_lang}.xhtml"
cat <<-EOF
<buildinfo
date="$date"
original="$olang"
filename="/${shortname#"$basedir"/}"
fileurl="/${shortname#"$basedir"/*/}"
dirname="/${dirname#"$basedir"/}"
language="$lang"
translation_state="$translation_state"
>
<trlist>
$(list_langs "$shortname")
</trlist>
<topbanner>$(include_xml "$topbanner_xml")</topbanner>
<textsetbackup>$(include_xml "$basedir/global/data/texts/texts.en.xml")</textsetbackup>
<textset>$(include_xml "$texts_xml")</textset>
<document
language="$act_lang"
$(get_attributes "$infile")
>
<set>
$(auto_sources "${shortname}" "$lang")
</set>
$(include_xml "$infile")
</document>
</buildinfo>
EOF
}


@ -5,6 +5,12 @@
<xsl:template name="body_scripts">
<script src="{$urlprefix}/scripts/bootstrap-3.0.3.custom.js"></script>
<xsl:if test="$build-env = 'development'">
<xsl:element name="script">
<xsl:attribute name="src"><xsl:value-of select="$urlprefix"/>/scripts/less.min.js</xsl:attribute>
</xsl:element>
</xsl:if>
</xsl:template>
</xsl:stylesheet>


@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<!-- showing a dropdown select menu with all countries in /global/countries/*.**.xml -->
<!-- showing a dropdown select menu with all countries in /tools/countries.**.xml -->
<xsl:template name="country-list">
<xsl:param name="required" select="'no'" />
<xsl:param name="class" select="''" />


@ -53,6 +53,28 @@
<xsl:attribute name="content">IE=edge</xsl:attribute>
</xsl:element>
<xsl:choose>
<xsl:when test="$build-env = 'development' and not(/buildinfo/document/@external)">
<xsl:choose>
<xsl:when test="$mode = 'valentine'">
<xsl:element name="link">
<xsl:attribute name="rel">stylesheet/less</xsl:attribute>
<xsl:attribute name="media">all</xsl:attribute>
<xsl:attribute name="href"><xsl:value-of select="$urlprefix"/>/look/valentine.less</xsl:attribute>
<xsl:attribute name="type">text/css</xsl:attribute>
</xsl:element>
</xsl:when>
<xsl:otherwise><!-- not valentine -->
<xsl:element name="link">
<xsl:attribute name="rel">stylesheet/less</xsl:attribute>
<xsl:attribute name="media">all</xsl:attribute>
<xsl:attribute name="href"><xsl:value-of select="$urlprefix"/>/look/fsfe.less</xsl:attribute>
<xsl:attribute name="type">text/css</xsl:attribute>
</xsl:element>
</xsl:otherwise>
</xsl:choose>
</xsl:when>
<xsl:otherwise><!-- not development -->
<xsl:choose>
<xsl:when test="$mode = 'valentine'">
<xsl:element name="link">
@ -71,6 +93,8 @@
</xsl:element>
</xsl:otherwise>
</xsl:choose>
</xsl:otherwise>
</xsl:choose>
<xsl:element name="link">
<xsl:attribute name="rel">stylesheet</xsl:attribute>


@ -54,7 +54,7 @@
<!-- Copyright notice -->
<xsl:element name="p">
<xsl:text>Copyright © 2001-2025 </xsl:text>
<xsl:text>Copyright © 2001-2024 </xsl:text>
<xsl:element name="a">
<xsl:attribute name="href">
<xsl:value-of select="$urlprefix"/>


@ -11,7 +11,6 @@
<xsl:element name="a">
<xsl:attribute name="id">logo</xsl:attribute>
<xsl:attribute name="aria-label">FSFE Logo</xsl:attribute>
<xsl:attribute name="href"><xsl:value-of select="$urlprefix"/>/</xsl:attribute>
<xsl:element name="span">
<xsl:call-template name="fsfe-gettext">
@ -182,7 +181,6 @@
<xsl:element name="button">
<xsl:attribute name="class">btn btn-primary</xsl:attribute>
<xsl:attribute name="type">submit</xsl:attribute>
<xsl:attribute name="aria-label">Search</xsl:attribute>
<xsl:element name="i">
<xsl:attribute name="class">fa fa-search</xsl:attribute>
</xsl:element>


@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- ====================================================================== -->
<!-- XSL script to extract the <localmenu> dir attribute of an XML file -->
<!-- ====================================================================== -->
<!-- This XSL script outputs the "dir" attribute of the <localmenu> element -->
<!-- of an XML file. It is used by the script tools/update_localmenus.sh. -->
<!-- ====================================================================== -->
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="text" encoding="UTF-8"/>
<xsl:template match="localmenu">
<xsl:value-of select="@dir"/>
</xsl:template>
<!-- Suppress output of text nodes, which would be the default -->
<xsl:template match="text()"/>
</xsl:stylesheet>


@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- ====================================================================== -->
<!-- XSL script to extract the <localmenu> element of an XML file -->
<!-- ====================================================================== -->
<!-- This XSL script outputs a line for the .localmenu.en.xml file from the -->
<!-- <localmenu> element of an .xhtml file. It is used by the script -->
<!-- tools/update_localmenus.sh. -->
<!-- ====================================================================== -->
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="text" encoding="UTF-8"/>
<xsl:template match="localmenu[@id]">
<xsl:text> &lt;localmenuitem set="</xsl:text>
<xsl:choose>
<xsl:when test="@set">
<xsl:value-of select="@set"/>
</xsl:when>
<xsl:otherwise>
<xsl:text>default</xsl:text>
</xsl:otherwise>
</xsl:choose>
<xsl:text>" id="</xsl:text>
<xsl:value-of select="@id"/>
<xsl:text>" link="</xsl:text>
<xsl:value-of select="$link"/>
<xsl:text>"&gt;</xsl:text>
<xsl:value-of select="normalize-space(node())"/>
<xsl:text>&lt;/localmenuitem&gt;</xsl:text>
</xsl:template>
<!-- Suppress output of text nodes, which would be the default -->
<xsl:template match="text()"/>
</xsl:stylesheet>


@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- ====================================================================== -->
<!-- XSL script to extract the used modules from a .xhtml file -->
<!-- ====================================================================== -->
<!-- This XSL script processes all <module> elements of a .xhtml file and -->
<!-- outputs the source files for these modules, separated by newlines. -->
<!-- It is used by the script tools/update_xmllists.sh. -->
<!-- ====================================================================== -->
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="text" encoding="UTF-8"/>
<xsl:template match="module">
<!-- Directory name -->
<xsl:text>global/data/modules/</xsl:text>
<!-- Filename = module id -->
<xsl:value-of select="@id"/>
<!-- Append a newline -->
<xsl:text>&#xa;</xsl:text>
</xsl:template>
<!-- Suppress output of text nodes, which would be the default -->
<xsl:template match="text()"/>
</xsl:stylesheet>

build/xslt/get_tags.xsl Normal file

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- ====================================================================== -->
<!-- XSL script to extract the content of <tag> elements from an XML file -->
<!-- ====================================================================== -->
<!-- This XSL script processes all <tag> elements of an XML file and -->
<!-- outputs the content of each of these elements, separated by newlines. -->
<!-- It is used by the script tools/update_xmllists.sh. -->
<!-- ====================================================================== -->
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="text" encoding="UTF-8"/>
<xsl:template match="tag">
<!-- Output tag name with some safeguarding against invalid characters -->
<xsl:value-of select="translate(@key, ' /:', '---')"/>
<!-- Output a blank -->
<xsl:text> </xsl:text>
<!-- Output tag label -->
<xsl:value-of select="."/>
<!-- Append a newline -->
<xsl:text>&#xa;</xsl:text>
</xsl:template>
<!-- Suppress output of text nodes, which would be the default -->
<xsl:template match="text()"/>
</xsl:stylesheet>


@ -35,7 +35,7 @@
<xsl:element name="li">
<xsl:element name="a">
<xsl:attribute name="href">
<xsl:value-of select="/buildinfo/@fileurl"/>.<xsl:value-of select="@id"/>.html</xsl:attribute>
<xsl:value-of select="$urlprefix"/><xsl:value-of select="/buildinfo/@fileurl"/>.<xsl:value-of select="@id"/>.html</xsl:attribute>
<xsl:value-of select="." disable-output-escaping="yes"/>
</xsl:element>
</xsl:element>


@ -3,6 +3,7 @@ services:
build: .
image: fsfe-websites
container_name: fsfe-websites
command:
ports:
- 2000:2000
- 2100:2100
@ -10,19 +11,5 @@ services:
- 2300:2300
- 2400:2400
- 2500:2500
secrets:
- KEY_PRIVATE
- KEY_PASSWORD
- GIT_TOKEN
volumes:
- ${VOLUME:-website-cached}:/website-cached
volumes:
website-cached:
website-cached-master:
secrets:
KEY_PRIVATE:
environment: KEY_PRIVATE
KEY_PASSWORD:
environment: KEY_PASSWORD
GIT_TOKEN:
environment: GIT_TOKEN
- ./:/fsfe-websites


@ -1,29 +0,0 @@
# Contributing
## Build Process Code
### Tooling
We check the validity of Python code in the repo using [ruff](https://astral.sh/ruff). We use it for both checking and formatting, with `ruff check` enabled in CI.
### Overview Stuff
We try to keep to some design patterns to keep things manageable.
Firstly, each phase as described in [the overview](./overview.md) should handle a meaningfully different kind of interaction. Each phase should be structured, to the greatest degree possible, as a sequence of steps. We consider that each phase should have a `run.py` file that exposes a `phase*_run` function that takes the arguments needed for its phase.
Each run function then calls a sequence of functions that are defined in the other files in the `phase*` folder. Each other file in the folder should expose one function, with the same name as the file, minus the file extension. For example, `create_files.py` should expose the function `create_files`. It is a common pattern for the exposed function to generate a list of files or things to act on, and then multithread this using another function; a hypothetical skeleton is sketched below.
Each step function should use `logger.info` at the top of its function to declare what it is doing.
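A hypothetical skeleton of this pattern (the phase number, file and function names here are illustrative only, not taken from the real phases):

# phase9/run.py
import logging
import multiprocessing

from .create_files import create_files

logger = logging.getLogger(__name__)


def phase9_run(languages: list[str], pool: multiprocessing.Pool) -> None:
    """
    Run all the necessary sub functions for phase9.
    """
    logger.info("Starting Phase 9")
    create_files(languages, pool)

Here `create_files.py` would expose `create_files`, typically building a list of work items and fanning them out over the shared process pool.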
### Best Practices
This is a bit of a messy list of things we have found that are perhaps not entirely obvious.
- When doing manipulation of stuff, have a look in the lib functions to see if it is already present. If you find a common pattern, perhaps functionise it.
- In phase 1, only update files using the `update_if_changed` function (a sketch of such a helper follows this list). This function will, as expected, take a file path and a string, and only update the file with the string if there is a difference. Not doing this means a file will always be updated, and hence anything depending on it will always be rebuilt, even if the file has not actually changed.
- When generating lists that end up in files, take care that they are stable to prevent unnecessary rebuilding.
- All steps are largely considered to be synchronous, and must be finished before the next step can start. Therefore, async must unfortunately be avoided. There are some steps where performance benefits could be achieved by allowing the next step to run concurrently, but the design complications make this unattractive.
- We use a single worker pool for multithreading. This gives a small performance benefit over continuously creating and destroying pools.
- All paths are to be handled with `pathlib`, not as strings.
- XML code should be generated with LXML instead of string templating. This is to ensure that we generate valid XML every time, and prevents issues with escaping, etc.
- Where possible, add type hints. We try to keep the codebase reasonably typed to keep it comprehensible.
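A sketch of what such a helper could look like (illustrative; the real implementation lives in the build lib and may differ):

```python
from pathlib import Path

def update_if_changed(path: Path, content: str) -> None:
    # Leave the file (and its mtime) untouched if nothing changed,
    # so dependents are not needlessly rebuilt.
    if path.exists() and path.read_text() == content:
        return
    path.write_text(content)
```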

View File

@ -1,11 +0,0 @@
# Management
Deployment of the website is now handled using Drone CI.
To do a new build, go to [the drone page](https://drone.fsfe.org/FSFE/fsfe-website).
If you have the correct permission, you will see a blue new build button in the upper right corner.
After it is pressed, a popup for the new build will appear on-screen. Select the branch you wish to build, `master` or `test`.
To pass extra arguments to the build script, add a parameter with key `EXTRA_FLAGS` and, as the value, an unquoted list of arguments. For example, to do a full build one would pass `--full`. Be sure to hit the add button on the right after setting it, to actually enable the parameter for the build. Then hit create, and away we go.

View File

@ -1,42 +0,0 @@
# Overview
This is to serve as a general overview of the build process, largely aimed at those who intend to contribute to it.
Firstly, the language of the build process is XML. Everything is based around XML, XHTML, and XSL, with a few non-XML extras the build process adds for features like choosing source files.
## Simple walkthrough
Firstly, the general case of how the build process works. The build process:
1. Takes an XHTML source file, with name `<name>.<lang>.xhtml`
2. If it exists in the same dir, loads `<name>.sources` and makes any data files specified in it available.
3. This making available is done by loading the source XHTML, the specified data files, and text files for common strings into a new in-memory XML document.
4. Selects an XSL file to process the in-memory XML with. It will use `<name>.xsl` if available, else the first `default.xsl` it finds ascending through parent dirs (a sketch of this lookup follows the list).
5. Processes the file, and puts the result into the output dir with the name `<name>.<lang>.html`.
6. If the file name matches its parent directory name, i.e. it is in `<name>/<name>.<lang>.xhtml`, then generates an index symlink in the output dir.
7. Generates symlinks from `<name>.<lang>.html` to `<name>.html.<lang>`, for [Apache Multiviews](https://httpd.apache.org/docs/current/content-negotiation.html) (how we handle choosing pages based on the user's browser language)
8. If the site has been staged, which occurs when manually specified, when using multiple targets, or when using ssh targets, the build process will have built the sites to a local directory, and will then copy them to each of the specified targets.
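A sketch of the stylesheet lookup from step 4, assuming `pathlib` (the function name is illustrative):

```python
from pathlib import Path

def find_stylesheet(source: Path) -> Path:
    # <name>.<lang>.xhtml -> prefer <name>.xsl next to the source file
    name = source.name.split(".")[0]
    specific = source.parent / f"{name}.xsl"
    if specific.exists():
        return specific
    # Otherwise take the first default.xsl found ascending through parent dirs
    for parent in source.parents:
        candidate = parent / "default.xsl"
        if candidate.exists():
            return candidate
    raise FileNotFoundError(f"No stylesheet found for {source}")
```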
## General details
The website uses incremental caching. These cache files are generally hidden files, and should be gitignored; for exactly which files are cache files, see the `.gitignore` in the repo root. The build process determines what must be updated based on the file modification times of each file's dependencies.
Care has been taken to make sure that files are rebuilt only when required, and are always rebuilt when required. But changes to fundamentals may not propagate correctly, and may require a manually triggered full rebuild, or waiting for the nightly one on prod.
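Conceptually, the check is a plain mtime comparison, roughly (illustrative sketch, not the actual code):

```python
from pathlib import Path

def needs_rebuild(target: Path, deps: list[Path]) -> bool:
    # Rebuild if the output is missing or any dependency is newer than it
    if not target.exists():
        return True
    mtime = target.stat().st_mtime
    return any(dep.stat().st_mtime > mtime for dep in deps)
```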
## Phases
For details on the phases and their exact implementation, please examine the codebase.
The build process can be conceptually divided into four phases: phases 0-3.
Phases 0 and 3 contain steps that may or may not be performed based on the passed arguments. They also act over all sites at once.
Phases 1 and 2 always run all steps inside them, and are run on a per-site basis.
### [phase0](./phase0.md)
### [phase1](./phase1.md)
### [phase2](./phase2.md)
### [phase3](./phase3.md)

View File

@ -1,7 +0,0 @@
# Phase 0
Parse arguments, and apply any special cases from them, such as updating the repo using git or cleaning the cache from old builds for a full rebuild.
Once this phase is over, our dependencies are available, arguments are parsed, and we are ready to start the next phase.
And now, [phase1](./phase1.md)

View File

@ -1,19 +0,0 @@
# Phase 1
Phase 1 is arguably the most complicated phase.
This phase never modifies the output directory; it concerns itself only with generating files in the source tree that are then used in phase 2.
It handles the generation of files not produced by an XSL processor (search indices, compiled CSS, etc.) and ensures that the dependencies of each XHTML file are present and up to date.
Phase 1 goes through the XSL stylesheets and updates the modification time of those whose parents have been updated, so that only the modification time of the stylesheet an XHTML file directly depends on needs to be considered.
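Very roughly, the idea looks like this (illustrative sketch using lxml; the real code must also handle the transitive case, e.g. by iterating to a fixpoint or walking the import graph in dependency order):

```python
from pathlib import Path
from lxml import etree

XSL_NS = "http://www.w3.org/1999/XSL/Transform"

def touch_if_parents_newer(sheet: Path) -> None:
    # If any imported/included stylesheet is newer, bump this sheet's mtime
    # so that XHTML files depending on it get rebuilt.
    root = etree.parse(str(sheet)).getroot()
    parents = [
        (sheet.parent / el.get("href")).resolve()
        for el in root.iter(f"{{{XSL_NS}}}import", f"{{{XSL_NS}}}include")
        if el.get("href")
    ]
    if any(p.exists() and p.stat().st_mtime > sheet.stat().st_mtime for p in parents):
        sheet.touch()
```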
It runs the subdirectory scripts, which perform custom actions required by those directories. For example, the news and events subdirectories of the main site use subdirectory scripts to generate indices, based on a template, for every year in the archives.
It also generates the paths for the global text files that store commonly used strings, using the translated file if it exists and falling back to English otherwise.
It does a few other things as well; for an exhaustive list, please examine the codebase.
After phase 1 is complete, we can be reasonably sure that the dependencies for phase 2 are in place, with timestamps that phase 2 can rely on.
And now, [phase2](./phase2.md)

View File

@ -1,19 +0,0 @@
# Phase 2
After all the XHTML files and their dependencies have been nicely lined up and prepared in phase 1, phase 2 begins.
It will:
1. Takes an XHTML source file, with name `<name>.<lang>.xhtml`
2. If it exists in the same dir, loads `<name>.sources` and makes any data files specified in it available.
3. This making available is done by loading the source XHTML, the specified data files, and text files for common strings into a new in-memory XML document.
4. Selects an XSL file to process the in-memory XML with. It will use `<name>.xsl` if available, else the first `default.xsl` it finds ascending through parent dirs.
5. Processes the file, and puts the result into the output dir with the name `<name>.<lang>.html`.
6. If the file name matches its parent directory name, i.e. it is in `<name>/<name>.<lang>.xhtml`, then generates an index symlink in the output dir.
7. Generates symlinks from `<name>.<lang>.html` to `<name>.html.<lang>`, for [Apache Multiviews](https://httpd.apache.org/docs/current/content-negotiation.html) (how we handle choosing pages based on the user's browser language; see the sketch below)
8. Using some somewhat advanced XSL wizardry, phase 2 is also able to build RSS and ICS files from XML data. The process of selecting stylesheets etc. is the same as for XHTML.
9. Copies over any static files that are needed, like images, PDFs, etc.
After phase 2 is over, we have a copy of all sites built. If the site was not staged, it is in the target directory; if it was staged, it is in the staging directory.
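The Multiviews symlinks from step 7 amount to something like this (illustrative sketch):

```python
from pathlib import Path

def multiviews_link(html: Path) -> None:
    # foo.en.html -> foo.html.en, so Apache can negotiate the language
    name, lang, ext = html.name.rsplit(".", 2)
    link = html.parent / f"{name}.{ext}.{lang}"
    if not link.exists() and not link.is_symlink():
        link.symlink_to(html.name)
```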
And now, [phase3](./phase3.md)

View File

@ -1,9 +0,0 @@
# Phase 3
This final phase relates to finishing up the build.
If the build was staged, this is the phase where the built result gets copied to the target(s).
And, if specified, the resulting build is served over localhost for rapid debugging.
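Serving locally needs nothing more exotic than Python's built-in HTTP server; a sketch, where the served directory and port are assumptions:

```python
import functools
import http.server

# "output/final" is an assumed path, purely for illustration
handler = functools.partial(
    http.server.SimpleHTTPRequestHandler, directory="output/final"
)
http.server.ThreadingHTTPServer(("127.0.0.1", 2000), handler).serve_forever()
```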
And finished!

View File

@ -1,47 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Run by the Dockerfile as the entrypoint
# Run from the volume of the website source mounted at /website-source
# Load sshkeys
if [ -f /run/secrets/KEY_PRIVATE ] && [ "$(cat /run/secrets/KEY_PRIVATE)" != "none" ]; then
# Start ssh-agent
eval "$(ssh-agent)"
# Create config file with required keys
mkdir -p ~/.ssh
echo "AddKeysToAgent yes" >~/.ssh/config
# Tighten permissions to keep ssh-add happy
chmod 400 /run/secrets/KEY_*
PASSWORD="$(cat "/run/secrets/KEY_PASSWORD")"
PRIVATE="$(cat "/run/secrets/KEY_PRIVATE")"
# Really should be able to just read from the private path, but for some reason ssh-add fails when using the actual path
# But works when you cat the path into another file and then load it
# Or cat the file and pipe it in through stdin
# Piping stdin to an expect command is quite complex, so we just make and remove a temporary key file.
# Absolutely bizarre, and not quite ideal security-wise
echo "$PRIVATE" >/tmp/key
chmod 600 /tmp/key
# Use our wrapper expect script to handle interactive input
./exp.exp "$PASSWORD" ssh-add "/tmp/key"
rm /tmp/key
echo "SSH Key Loaded"
else
echo "Secret not defined!"
fi
if [ -f /run/secrets/GIT_TOKEN ] && [ "$(cat /run/secrets/GIT_TOKEN)" != "none" ]; then
export GIT_TOKEN="$(cat "/run/secrets/GIT_TOKEN")"
fi
# Rsync files over, do not use the mtimes as they are wrong due to docker shenanigans
# Use the .gitignore as a filter to not remove any files generated by previous runs
rsync -rlpgoDz --delete --checksum --filter=':- .gitignore' ./ /website-cached/source
# Change to source repo
cd /website-cached/source
# Run the build script, expanding all args passed to this script
python3 ./build.py "$@"

12
exp.exp
View File

@ -1,12 +0,0 @@
#!/usr/bin/env expect
set timeout 20
set cmd [lrange $argv 1 end]
set password [lindex $argv 0]
eval spawn $cmd
expect "Enter passphrase*:"
send "$password\r"
# Wait for the spawned command to exit and propagate its exit code
lassign [wait] pid spawn_id os_error actual_exit_code
exit $actual_exit_code

18
fsfe.org/.gitignore vendored
View File

@ -1,18 +0,0 @@
# css
look/fsfe.min.css
look/valentine.min.css
# automatically generated subdirectory files
events/????/index.??.xhtml
events/????/index.sources
events/????/index.xsl
news/????/index.??.xhtml
news/????/index.sources
news/????/index.xsl
news/*/.*.??.xml
# search index
search/index.js
# tags
tags/tagged-*.??.xhtml
tags/.tags.??.xml
# internal activities file
internal/fsfe-activities-options.*.xml

View File

@ -58,18 +58,14 @@ RewriteRule ^women(/.*)?$ https://wiki.fsfe.org/Teams/Women [R=301,L]
RewriteRule ^standards(.*) /freesoftware/standards$1 [R=301,L]
RewriteRule ^education(.*) /freesoftware/education$1 [R=301,L]
# Apple Litigation
RewriteRule ^activities/apple-litigation/confidentialrequest(.*) https://curia.europa.eu/juris/document/document.jsf?text=&docid=295904&pageIndex=0&doclang=en&mode=req&dir=&occ=first&part=1&cid=13488140f [R=307,L]
# Translators
RewriteRule ^translators/weblate$ https://hosted.weblate.org/projects/fsfe [R=301,L]
RewriteRule ^translators/weblate https://hosted.weblate.org/projects/fsfe [R=301,L]
RewriteRule ^translators(.*) /contribute/translators/translators.html [R=301,L]
# =============================================================================
# CAMPAIGN/TOPIC-SPECIFIC REDIRECTS
# =============================================================================
# Podcast
RewriteRule ^news/podcast/?$ /news/podcast.html [R=301,L]
RewriteRule ^news/podcast.rss$ /news/podcast.en.rss [L,R=301]
@ -81,8 +77,8 @@ RewriteRule ^news/latest-news$ /news/2024/news-20240610-01.html [R=301,L]
RewriteRule ^activities/ilovefs/toolkit$ https://download.fsfe.org/campaigns/ilovefs/toolkit/ [R=301,L]
# TODO each year: update links for latest report
RewriteRule ^activities/ilovefs/latest-report$ /activities/ilovefs/report/report_2025.html [R=307,L]
RewriteRule ^activities/ilovefs/latest-podcast$ /news/podcast/episode-30.html [R=301,L]
RewriteRule ^activities/ilovefs/latest-report$ /activities/ilovefs/report/report_2024.html [R=301,L]
RewriteRule ^activities/ilovefs/latest-podcast$ /news/podcast/episode-24.html [R=301,L]
# Redirect from old locations
RewriteRule ^activities/ilovefs/ilovefs(.*)html$ /activities/ilovefs/index$1html [R=301,L]
@ -107,12 +103,7 @@ RewriteRule ^drm.info(/.*)? https://drm.info$1 [R=301,L]
# TODO please update the link for new registration form
RewriteRule ^activities/yh4f/register https://share.fsfe.org/apps/forms/s/kQX233iKfwe3ZtgiHxwLZJNB [R=301,L]
RewriteRule ^activities/yh4f/feedback https://share.fsfe.org/apps/forms/s/LBHwzSmaiyoX3Tmwo2qJgAki [R=301,L]
RewriteRule ^activities/yh4f/mediakit https://download.fsfe.org/YH4F/Youth_Hacking_4_Freedom_2025.pdf [R=301,L]
# Current ThankGNUs page
# TODO each year: update redirect to current page
RewriteRule ^donate/thankgnus$ /donate/thankgnus-2025 [R=303,L]
RewriteRule ^donate/thankgnus\.(.*)$ /donate/thankgnus-2025.$1 [R=303,L]
RewriteRule ^activities/yh4f/mediakit https://download.fsfe.org/YH4F/Youth%20Hacking%204%20Freedom%20-%202025.pdf [R=301,L]
# =============================================================================
# MOVED PAGES

View File

@ -1,5 +1,5 @@
Contact: mailto:security@fsfe.org
Expires: 2026-01-15T23:00:00.000Z
Expires: 2024-12-30T23:00:00.000Z
Encryption: openpgp4fpr:168FAB826B58B16874CE6E207784A1960FBEB3FA
Encryption: openpgp4fpr:23EEF484FDF8291CBA09A40625FE376FF17694A1
Encryption: openpgp4fpr:AEEA84E56F3C69EAEECCA354C465BEB43C11B337

View File

@ -101,7 +101,7 @@
</li>
</ul>
<h2>Mettersi in contatto</h2>
<h2>Mettersi in conttatto</h2>
<ul>
<li>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8" ?>
<associateset>
<version>1</version>
<associate id="openlabs">
<name>المختبرات المفتوحة</name>
<description>
<p>تتمثل مهمة المختبرات المفتوحة <span lang="en">Open Labs</span> في تعزيز الانفتاح والحرية والشفافية واللامركزية من خلال رفع أصوات الجميع بوصفهم وسطاً معنياً بالبرمجيات الحرة.</p>
</description>
<link>https://openlabs.cc/en/</link>
</associate>
</associateset>

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8" ?>
<associateset>
<version>1</version>
<associate id="openlabs">
<name>Open Labs</name>
<description>
<p>
Open Labs' mission is to promote openness, freedom, transparency and
decentralization by amplifying our voice as a community
altogether.
</p>
</description>
<link>https://openlabs.cc/en/</link>
</associate>
</associateset>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8" ?>
<html>
<version>8</version>
<version>2</version>
<head>
<title>Kontakt</title>
@ -35,6 +35,9 @@
Wenn Sie eine verschlüsselte E-Mail bevorzugen,
senden Sie sie bitte direkt an <a href="/about/people/kirschner/">Matthias Kirschner</a>.
</p>
</div>
<div class="box">
<h2 id="media">Presseanfragen</h2>
<p>
@ -42,10 +45,6 @@
Alternativ schreiben Sie eine E-Mail an <email mailto="yes">press@fsfe.org</email>.
</p>
</div>
<div class="box">
<h2 id="ftf">Anfragen zu rechtlichen Themen</h2>
<p>
Für Unterstützung in Lizenzfragen zu Freier Software senden Sie bitte eine E-Mail
@ -53,14 +52,6 @@
Weitere Infos finden Sie außerdem auf der <a href="/freesoftware/legal/faq.html#lqteam">Legal FAQ</a> Seite.
</p>
<h2>Technische Anfragen</h2>
<p>
Wenn Sie technische Fragen zu unseren Dienstleistungen haben oder auf technische Probleme stoßen,
überprüfen Sie bitte die <a href="https://status.fsfe.org">Status Seite</a> auf bekannte Probleme.
Wenn Ihr Problem dort nicht aufgeführt ist, oder Sie weitere Fragen dazu haben, lassen Sie uns diese
unter <email>techsupport@fsfe.org</email> wissen.
</p>
</div>
</div>
@ -139,6 +130,44 @@
<div class="box first">
<h3 id="mailinglist">Mailinglisten</h3>
<p>
Wir stellen eine Reihe von Mailing-Listen für themenbezogene
Diskussionen und für den Austausch lokaler Gruppen zur Verfügung. Ein
komplettes Verzeichnis unserer öffentlichen Mailing-Listen findet
sich auf <a href="https://lists.fsfe.org/">unserem
Mailinglisten-Server</a>.
</p>
<h3 id="forum">Forum</h3>
<p>
Die FSFE bietet <a href="https://community.fsfe.org">Discourse</a> an, eine
forenähnliche Plattform. Sie ist eine Ergänzung unserer Mailinglisten und
steht für Diskussionen und Kommentare bereit. Du bist herzlich eingeladen,
ein Teil dieses Forums zu werden und uns zu helfen, es zu verbessern!
</p>
<h3 id="micro">Microblogging</h3>
<p>
Die FSFE und viele Unterstützer nutzen Microblogging-Dienste für den
Austausch aktueller Meldungen und interessanter Links. Wenn du dich
mit uns verbinden möchtest, folge dem Account <em>fsfe</em> in den
unterschiedlichen Netzwerken, wie <a href="https://mastodon.social/@fsfe">Mastodon</a>.
</p>
<p>
Damit deine Beiträge zu Freier Software gefunden werden, nutze die
passenden Hash-Tags, wie #FreeSoftware, #IloveFS oder #FSFE in deinen
Nachrichten.
</p>
</div>
<div class="box">
<h3 id="chat">Chat rooms</h3>
<h4 id="matrix">Matrix</h4>
@ -171,43 +200,6 @@
<li>Server: <em>irc.libera.chat</em></li>
<li>Channel: <em>#fsfe</em></li>
</ul>
</div>
<div class="box">
<h3 id="mailinglist">Mailinglisten</h3>
<p>
Wir stellen eine Reihe von Mailing-Listen für themenbezogene
Diskussionen und für den Austausch lokaler Gruppen zur Verfügung. Ein
komplettes Verzeichnis unserer öffentlichen Mailing-Listen findet
sich auf <a href="https://lists.fsfe.org/">unserem
Mailinglisten-Server</a>.
</p>
<h3 id="micro">Microblogging</h3>
<p>
Die FSFE und viele Unterstützer nutzen Microblogging-Dienste für den
Austausch aktueller Meldungen und interessanter Links. Wenn du dich
mit uns verbinden möchtest, folge dem Account <em>fsfe</em> in den
unterschiedlichen Netzwerken, wie <a href="https://mastodon.social/@fsfe">Mastodon</a>.
</p>
<p>
Damit deine Beiträge zu Freier Software gefunden werden, nutze die
passenden Hash-Tags, wie #FreeSoftware, #IloveFS oder #FSFE in deinen
Nachrichten.
</p>
<h3>Lokale Gruppen</h3>
<p>
Wir haben ein Netzwerk von Ehrenamtlichen, die sich in lokalen Gruppen in ganz Europa treffen.
Weitere Informationen, wie Sie die Gruppen erreichen und unterstützen können finden Sie auf der <a href="/about/groups.html">Seite zu den lokalen Gruppen</a>.
</p>
</div>
</div>

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8" ?>
<html>
<version>8</version>
<version>7</version>
<head>
<title>Contact</title>
@ -119,6 +119,48 @@
<div class="grid-50-50">
<div class="box first">
<h3 id="mailinglist">Mailing Lists</h3>
<p>
We have a number of mailing lists for local groups as well as thematic
teams. You can find a complete list of the public lists on our
<a href="https://lists.fsfe.org/">mailing list server</a>.
</p>
<h3 id="forum">Forum</h3>
<p>
The FSFE provides <a href="https://community.fsfe.org">Discourse</a>, a
forum-like discussion platform. It is a complement to our mailing lists
and allows for discussions and comments. You are welcome to become part of
this new service and help us improve it.
</p>
<h3 id="micro">Microblogging</h3>
<p>
The FSFE and many supporters use microblogging
services to send status updates and interesting links.
Follow the user fsfe in its diverse social media networks,
such as <a href="https://mastodon.social/@fsfe">Mastodon</a>.
</p>
<p>
We encourage you to use the tags <code>#FreeSoftware</code>,
<code>#IloveFS</code>, <code>#FSFE</code>, etc. in your messages.
</p>
<h3 id="LocalGroups">Local Groups</h3>
<p>
We have a network of Free Software volunteers meeting in local groups throughout Europe.
Find more information about how to reach and join them at the <a href="/about/groups.html">local groups overview</a> page.
</p>
</div>
<div class="box">
<h3 id="chat">Chat rooms</h3>
<h4 id="matrix">Matrix</h4>
@ -152,39 +194,6 @@
<li>Server: <code>irc.libera.chat</code></li>
<li>Channel: <code>#fsfe</code></li>
</ul>
</div>
<div class="box">
<h3 id="mailinglist">Mailing Lists</h3>
<p>
We have a number of mailing lists for local groups as well as thematic
teams. You can find a complete list of the public lists on our
<a href="https://lists.fsfe.org/">mailing list server</a>.
</p>
<h3 id="micro">Microblogging</h3>
<p>
The FSFE and many supporters use microblogging
services to send status updates and interesting links.
Follow the user fsfe in its diverse social media networks,
such as <a href="https://mastodon.social/@fsfe">Mastodon</a>.
</p>
<p>
We encourage you to use the tags <code>#FreeSoftware</code>,
<code>#IloveFS</code>, <code>#FSFE</code>, etc. in your messages.
</p>
<h3 id="LocalGroups">Local Groups</h3>
<p>
We have a network of Free Software volunteers meeting in local groups throughout Europe.
Find more information about how to reach and join them at the <a href="/about/groups.html">local groups overview</a> page.
</p>
</div>
</div>

View File

@ -112,6 +112,46 @@
<div class="grid-50-50">
<div class="box first">
<h3 id="mailinglist">E-maillijsten</h3>
<p>
Wij hebben een aantal e-maillijsten voor lokale groepen en voor thematische
teams. U vindt een volledige lijst van de publieke lijsten op onze
<a href="https://lists.fsfe.org/">e-maillijstserver</a>.
</p>
<h3 id="forum">Forum</h3>
<p>
De FSFE biedt <a href="https://community.fsfe.org">Discourse</a>, een
forum-achtig discussieplatform. Het is een aanvulling op onze e-maillijsten
en maakt discussies en commentaar mogelijk. Wij verwelkomen u als deelnemer aan
deze nieuwe dienst en stellen het op prijs als u helpt deze te verbeteren.
</p>
<h3 id="micro">Microblogging</h3>
<p>
De FSFE en veel supporters gebruiken microblogdiensten om status-updates en
interessante links te sturen. Als u graag contact houdt, volg dan de <em>fsfe</em>-gebruiker
in verschillende sociale medianetwerken, zoals <a href="https://mastodon.social/@fsfe">Mastodon</a>.
</p>
<p>
We moedigen u aan om de trefwoorden (tags) #FreeSoftware,
#IloveFS, etc. in uw berichten te gebruiken.
</p>
<h3 id="LocalGroups">Lokale groepen</h3>
<p>
We hebben een netwerk van Vrije Softwarevrijwilligers die elkaar in heel Europa in lokale groepen ontmoeten. Vind meer informatie over hoe ze te bereiken en met hen mee te doen op de pagina met het <a href="/about/groups.html">overzicht van lokale groepen</a>.
</p>
</div>
<div class="box">
<h3 id="chat">Kletsruimtes</h3>
<h4 id="matrix">Matrix</h4>
@ -145,37 +185,6 @@
<li>Server: <code>irc.libera.chat</code></li>
<li>Channel: <code>#fsfe</code></li>
</ul>
</div>
<div class="box">
<h3 id="mailinglist">E-maillijsten</h3>
<p>
Wij hebben een aantal e-maillijsten voor lokale groepen en voor thematische
teams. U vindt een volledige lijst van de publieke lijsten op onze
<a href="https://lists.fsfe.org/">e-maillijstserver</a>.
</p>
<h3 id="micro">Microblogging</h3>
<p>
De FSFE en veel supporters gebruiken microblogdiensten om status-updates en
interessante links te sturen. Als u graag contact houdt, volg dan de <em>fsfe</em>-gebruiker
in verschillende sociale medianetwerken, zoals <a href="https://mastodon.social/@fsfe">Mastodon</a>.
</p>
<p>
We moedigen u aan om de trefwoorden (tags) #FreeSoftware,
#IloveFS, etc. in uw berichten te gebruiken.
</p>
<h3 id="LocalGroups">Lokale groepen</h3>
<p>
We hebben een netwerk van Vrije Softwarevrijwilligers die elkaar in heel Europa in lokale groepen ontmoeten. Vind meer informatie over hoe ze te bereiken en met hen mee te doen op de pagina met het <a href="/about/groups.html">overzicht van lokale groepen</a>.
</p>
</div>
</div>

View File

@ -1,19 +1,19 @@
<?xml version="1.0" encoding="utf-8" ?>
<html>
<version>4</version>
<version>3</version>
<head>
<title>Grafiken - Buttons für FSFE Supporter</title>
<title>Grafiken - Buttons für Spender und Unterstützer</title>
</head>
<body class="article">
<h1>Buttons für FSFE Supporter</h1>
<h1>Buttons für Spender und Unterstützer</h1>
<div id="introduction">
<p>
Die Free Software Foundation Europe dankt allen <a
href="/donate/thankgnus.html">FSFE Supportern</a>, die die Arbeit der FSFE
href="/donate/thankgnus.html">Spendern</a>, die die Arbeit der FSFE
durch <a href="https://my.fsfe.org/donate">Spenden</a> unterstützt
haben; mit diesen Spenden-Grafiken möchten wir Ihnen die Möglichkeit
geben, Ihre Unterstützung der Free Software Foundation Europe auch in
@ -21,6 +21,24 @@
</p>
</div>
<p>
Auf dieser Seite finden Sie jahresspezifische Grafiken <a
href="#donor-buttons">für Spender</a> sowie Badges <a
href="#supporter-buttons">für unsere langfristigen Supporter</a>.
</p>
<h2 id="donor-buttons">Spender-Grafiken</h2>
<buttons />
<h2 id="supporter-buttons">Supporter-Grafiken</h2>
<p>
Supporter leisten einen regelmäßigen Beitrag von mindestens 60 € jährlich
oder 10 € monatlich. <a href="https://my.fsfe.org/donate">Werden Sie einer
von ihnen</a>!
</p>
<table class="table table-bordered table-condensed table-responsive">
<tr>
<th class="text-center">Design</th>

View File

@ -1,19 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<html>
<version>4</version>
<version>3</version>
<head>
<title>Graphics - FSFE Supporter Buttons</title>
<title>Graphics - Donor and Supporter Buttons</title>
</head>
<body class="article">
<h1>FSFE Supporter Buttons</h1>
<h1>Donor and Supporter Buttons</h1>
<div id="introduction">
<p>
The Free Software Foundation Europe thanks all <a
href="/donate/thankgnus.html">FSFE supporters</a> who have supported the work of
href="/donate/thankgnus.html">donors</a> who have supported the work of
the FSFE through their <a href="/help/donate.html">donations</a>; with
these sponsoring buttons, it seeks to provide a way for donors to
publicly advertise their contribution to the Free Software Foundation
@ -21,6 +21,23 @@
</p>
</div>
<p>
On this page, you will find year-specific graphics <a
href="#donor-buttons">for donors</a> as well as badges <a
href="#supporter-buttons">for our long-term supporters</a>.
</p>
<h2 id="donor-buttons">Donor buttons</h2>
<buttons />
<h2 id="supporter-buttons">Supporter Buttons</h2>
<p>
Our awesome supporters regularly donate at least 60€ per year or 10€ per
month. <a href="https://my.fsfe.org/donate">Become one of them</a>!
</p>
<table class="table table-bordered table-condensed table-responsive">
<tr>
<th class="text-center">Design</th>

View File

@ -10,14 +10,9 @@
<h1 id="community">Community</h1>
<!-- The div below uses class="toc float-right visible-xs visible-sm": "toc" is the table of contents, "float-right" is the CSS rule that floats the element to the right side, and "visible-xs"/"visible-sm"
ensure that the toc is visible on small devices only; on larger devices (e.g. a desktop computer) it remains hidden. Be careful to consider where you want the toc to appear on your webpage
when viewed on small devices; that is the point where you need to paste this block. Also make sure to include the code below for the sidebar, to hide the toc there on small devices (see the bottom of this file),
and for more information search for Bootstrap with XHTML. -->
<!-- Creating the TOC: make sure the anchors are linked with <a href="#ANCHOR">TEXT FOR ANCHOR</a>, and also add those anchors as id attributes on the corresponding heading or paragraph. -->
<div class="toc float-right visible-xs visible-sm">
<p class="head"><a href="#community-groups">Community Groups</a></p>
<!-- class="head" suggests that this paragraph is styled as a heading or section title using CSS-->
<p>Local Groups</p>
<ul style="white-space: nowrap;">
<li><a href="#austria">🇦🇹 Austria</a></li>
@ -307,7 +302,6 @@
</body>
<sidebar promo="our-work">
<div class="hidden-xs hidden-sm">
<!-- hidden-xs and hidden-sm hide the code on small devices like phones, tablets, etc. -->
<h3>Local Groups</h3>
<ul style="white-space: nowrap;">
<li><a href="#austria">🇦🇹 Austria</a></li>

View File

@ -150,8 +150,8 @@
interested in diving into these topics, you've come to the right place.
You're fired up about a free information society, and you've already done something to tell others
why freedom matters? Then this internship is for you. Your academic
background and qualifications matter less than your experience,
willingness to learn, and ability to get things done. We expect:
background and qualifications matter less than your experience, your
will to learn, and your ability to get things done. We expect:
</p>
<ul>
@ -178,7 +178,7 @@
internship, unless you're applying for a technical
internship.
</li>
<li>At the FSFE we value diversity and acknowledge the difficulties women*, BPoC, Trans*, people with disabilities, people with a history of migration, and people from other underrepresented groups face in the IT world. Therefore we want to emphasise that we particularly welcome applications from people who are underrepresented in the Free Software movement and the IT environment.</li>
<li>Enhancing diversity in Free Software is a priority for us, particularly by encouraging greater participation from women. Therefore, when candidates possess equal qualifications, we will prioritize applications from suitably qualified female candidates and other underrepresented groups.</li>
</ul>
<blockquote>

Binary file not shown. (Before: 75 KiB, After: 2.3 KiB)

Binary file not shown. (Before: 4.9 KiB, After: 142 KiB)

Binary file not shown. (Before: 292 KiB)

View File

@ -4,7 +4,8 @@
<quote
id="zerolo"
image="https://pics.fsfe.org/uploads/big/6496ea592fcc4803aaef3930763e84dd.jpg">
image="https://pics.fsfe.org/uploads/big/6496ea592fcc4803aaef3930763e84dd.jpg"
frontpage="yes">
<name>
Tomás Zerolo (Self-employed IT specialist)
</name>

View File

@ -4,7 +4,8 @@
<quote
id="zerolo"
image="https://pics.fsfe.org/uploads/big/6496ea592fcc4803aaef3930763e84dd.jpg">
image="https://pics.fsfe.org/uploads/big/6496ea592fcc4803aaef3930763e84dd.jpg"
frontpage="yes">
<name>
Tomás Zerolo (Specialista IT autonomo)
</name>

View File

@ -4,7 +4,8 @@
<quote
id="zerolo"
image="https://pics.fsfe.org/uploads/big/6496ea592fcc4803aaef3930763e84dd.jpg">
image="https://pics.fsfe.org/uploads/big/6496ea592fcc4803aaef3930763e84dd.jpg"
frontpage="yes">
<name>
Tomás Zerolo (Zelfstandig IT-specialist)
</name>

View File

@ -10,12 +10,11 @@
</name>
<text>
Prova a pensare a queste affermazioni: <br/>
"È meglio se controlliamo come puoi o non puoi usare i tuoi strumenti."<br/>
"È meglio se non sai come funzionano i tuoi strumenti."<br/>
"È meglio se ti proibiamo di condividere i tuoi strumenti."<br/>
"È meglio se non ti è permesso migliorare i tuoi strumenti."<br/>
"È meglio se noi controlliamo come puoi o meno utilizzare i tuoi strumenti".<br/>
"È meglio se non sai come funzionano i tuoi strumenti".<br/>
"È meglio se noi ti proibiamo di condividere i tuoi strumenti".<br/>
"È meglio se non sei autorizzato a migliorare i tuoi strumenti".<br/>
Se trovi assurde queste affermazioni tanto quanto le trovo io, allora puoi capire perché sono un'attivista del Software Libero.
</text>
<watch link="interviews/weitzhofer.html#video-30s-bernhard">
Guarda la nostra breve intervista con Bernhard

View File

@ -4,8 +4,7 @@
<quote
id="weitzhofer"
image="https://pics.fsfe.org/uploads/small/3ebc16accdc26575800644c530e3f721.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/small/3ebc16accdc26575800644c530e3f721.jpg">
<name>
Bernhard Weitzhofer (Ambtenaar)
</name>

View File

@ -4,8 +4,7 @@
<quote
id="canik"
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg">
<name>
Nermin Canik (IT Business Analyst)
</name>

View File

@ -4,8 +4,7 @@
<quote
id="canik"
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg">
<name>
Nermin Canik (Analista de Negocios de TI)
</name>

View File

@ -4,8 +4,7 @@
<quote
id="canik"
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg">
<name>
Nermin Canik (Analista del business IT)
</name>

View File

@ -4,8 +4,7 @@
<quote
id="canik"
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg">
<name>
Nermin Canik (IT Business Analist)
</name>

View File

@ -4,8 +4,7 @@
<quote
id="canik"
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg">
<name>
Nermin Canik (Analista IT de Negócio)
</name>

View File

@ -4,8 +4,7 @@
<quote
id="canik"
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg">
<name>
Нермин Джаник (деловой аналитик компьютерных технологий)
</name>

View File

@ -4,8 +4,7 @@
<quote
id="canik"
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/big/d21449f4f7dd061b4e33210ec60bf81f.jpg">
<name>
Nermin Canik (Analist Biznesi TI)
</name>

View File

@ -4,7 +4,8 @@
<quote
id="mueller"
image="https://pics.fsfe.org/uploads/big/0a57bb700b1c1a45694230e1a846c0f2.jpg">
image="https://pics.fsfe.org/uploads/big/0a57bb700b1c1a45694230e1a846c0f2.jpg"
frontpage="yes">
<name>
Reinhard Müller (Software Developer)
</name>

View File

@ -4,7 +4,8 @@
<quote
id="mueller"
image="https://pics.fsfe.org/uploads/big/0a57bb700b1c1a45694230e1a846c0f2.jpg">
image="https://pics.fsfe.org/uploads/big/0a57bb700b1c1a45694230e1a846c0f2.jpg"
frontpage="yes">
<name>
Reinhard Müller (Sviluppatore software)
</name>

View File

@ -4,7 +4,8 @@
<quote
id="mueller"
image="https://pics.fsfe.org/uploads/big/0a57bb700b1c1a45694230e1a846c0f2.jpg">
image="https://pics.fsfe.org/uploads/big/0a57bb700b1c1a45694230e1a846c0f2.jpg"
frontpage="yes">
<name>
Reinhard Müller (Software-ontwikkelaar)
</name>

View File

@ -4,8 +4,7 @@
<quote
id="hubertz"
image="https://pics.fsfe.org/uploads/small/48f815794bed6cc4f5200cc53a40e088.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/small/48f815794bed6cc4f5200cc53a40e088.jpg">
<name>
Johannes Hubertz
</name>

View File

@ -4,8 +4,7 @@
<quote
id="hubertz"
image="https://pics.fsfe.org/uploads/small/48f815794bed6cc4f5200cc53a40e088.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/small/48f815794bed6cc4f5200cc53a40e088.jpg">
<name>
Johannes Hubertz
</name>

View File

@ -4,8 +4,7 @@
<quote
id="hubertz"
image="https://pics.fsfe.org/uploads/small/48f815794bed6cc4f5200cc53a40e088.jpg"
frontpage="yes">
image="https://pics.fsfe.org/uploads/small/48f815794bed6cc4f5200cc53a40e088.jpg">
<name>
Johannes Hubertz
</name>

View File

@ -4,7 +4,8 @@
<quote
id="zarl-zierl"
image="https://pics.fsfe.org/uploads/big/4056f837ac63fd111a78c6f46184c1aa.jpg">
image="https://pics.fsfe.org/uploads/big/4056f837ac63fd111a78c6f46184c1aa.jpg"
frontpage="yes">
<name>
Johannes Zarl-Zierl (IT Professional)
</name>

Some files were not shown because too many files have changed in this diff.