diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 03c74e4..4abfd57 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -18,6 +18,14 @@ on: description: 'Python version used to generate docs' required: false default: '3' + language: + description: 'Language to build' + required: false + default: 'en' + sphinxopts_json: + description: 'JSON array of language-specific sphinxopts' + required: false + default: '[]' workflow_call: inputs: reference: @@ -36,6 +44,14 @@ on: description: 'Python version used to generate docs' default: '3' type: string + language: + description: 'Language to build' + default: 'en' + type: string + sphinxopts_json: + description: 'JSON array of language-specific sphinxopts' + default: '[]' + type: string permissions: contents: read pages: write @@ -46,6 +62,8 @@ jobs: outputs: dist_version: ${{ steps.get-version.outputs.dist_version }} major_minor: ${{ steps.get-version.outputs.major_minor }} + translation_ref: ${{ steps.resolve-translation.outputs.translation_ref }} + translation_repository: ${{ steps.resolve-translation.outputs.translation_repository }} steps: - uses: actions/setup-python@master with: @@ -56,9 +74,48 @@ jobs: ref: ${{ inputs.reference }} - id: get-version run: | - echo "dist_version=$(python tools/extensions/patchlevel.py --short)" >> $GITHUB_OUTPUT - echo "major_minor=$(python -c 'from tools.extensions.patchlevel import get_version_info; print(get_version_info()[0])')" >> $GITHUB_OUTPUT + echo "dist_version=$(python tools/extensions/patchlevel.py --short)" >> "$GITHUB_OUTPUT" + echo "major_minor=$(python -c 'from tools.extensions.patchlevel import get_version_info; print(get_version_info()[0])')" >> "$GITHUB_OUTPUT" working-directory: ./Doc + - name: Resolve translation branch + id: resolve-translation + if: ${{ inputs.language != 'en' }} + env: + LANGUAGE_TAG: ${{ inputs.language }} + MAJOR_MINOR: ${{ steps.get-version.outputs.major_minor }} + run: | + python - 
<<'PY' + import bisect + import os + import re + import subprocess + + language = os.environ["LANGUAGE_TAG"] + target_version = os.environ["MAJOR_MINOR"] + repo_tag = language.replace("_", "-").lower() + repo_url = f"https://github.com/python/python-docs-{repo_tag}.git" + + remote_branches = subprocess.check_output( + ["git", "ls-remote", "--heads", repo_url], text=True + ) + branches = sorted( + { + match.group(1) + for match in re.finditer(r"refs/heads/([0-9]+\.[0-9]+)$", remote_branches, re.M) + }, + key=lambda value: tuple(map(int, value.split("."))), + ) + if not branches: + raise SystemExit(f"No numeric translation branches found for {repo_url}") + branch_tuples = [tuple(map(int, value.split("."))) for value in branches] + target_tuple = tuple(map(int, target_version.split("."))) + index = bisect.bisect_left(branch_tuples, target_tuple) + chosen = branches[index] if index < len(branches) else branches[-1] + + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as file: + file.write(f"translation_ref={chosen}\n") + file.write(f"translation_repository=python/python-docs-{repo_tag}\n") + PY build-html: needs: prepare runs-on: ubuntu-latest @@ -70,20 +127,64 @@ jobs: with: repository: ${{ inputs.repository }} ref: ${{ inputs.reference }} + - name: Checkout translation sources + if: ${{ inputs.language != 'en' }} + uses: actions/checkout@master + with: + repository: ${{ needs.prepare.outputs.translation_repository }} + ref: ${{ needs.prepare.outputs.translation_ref }} + path: Doc/locale/${{ inputs.language }}/LC_MESSAGES + - id: build-settings + env: + LANGUAGE_TAG: ${{ inputs.language }} + SPHINXOPTS_JSON: ${{ inputs.sphinxopts_json }} + run: | + python - <<'PY' + import json + import os + import shlex + + language = os.environ["LANGUAGE_TAG"] + config_sphinxopts = [token for opt in json.loads(os.environ["SPHINXOPTS_JSON"]) for token in shlex.split(opt, posix=False)] + common = [] + if language != "en": + try: + import re + with 
open("Doc/requirements.txt") as f: + m = re.search(r"(?i)^sphinx[=<>!~\s]*(\d+)\.(\d+)", f.read(), re.MULTILINE) + sphinx_ver = (int(m.group(1)), int(m.group(2))) if m else (99, 0) + except FileNotFoundError: + sphinx_ver = (99, 0) + common = [ + "-D", "locale_dirs=locale", + "-D", f"language={language}", + "-D", "gettext_compact=0", + "-D", "exclude_patterns=includes/*.rst,venv/*,README.rst,locale/**", + ] + if sphinx_ver >= (7, 1): + common += ["-D", "translation_progress_classes=1"] + artifact_suffix = "" if language == "en" else f"-{language}" + pdf = [*common, *config_sphinxopts] + + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as file: + file.write(f"artifact_suffix={artifact_suffix}\n") + file.write(f"common_sphinxopts={shlex.join(common)}\n") + file.write(f"pdf_sphinxopts={shlex.join(pdf)}\n") + PY - run: make venv working-directory: ./Doc - - run: make dist-html + - run: make dist-html SPHINXERRORHANDLING= SPHINXOPTS="${{ steps.build-settings.outputs.common_sphinxopts }}" working-directory: ./Doc - uses: actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs-html.zip + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs-html.zip path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs-html.zip if-no-files-found: ignore - uses: actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs-html.tar.bz2 + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs-html.tar.bz2 path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs-html.tar.bz2 if-no-files-found: ignore build-text: @@ -97,20 +198,64 @@ jobs: with: repository: ${{ inputs.repository }} ref: ${{ inputs.reference }} + - name: Checkout translation sources + if: ${{ inputs.language != 'en' }} + uses: actions/checkout@master + with: + 
repository: ${{ needs.prepare.outputs.translation_repository }} + ref: ${{ needs.prepare.outputs.translation_ref }} + path: Doc/locale/${{ inputs.language }}/LC_MESSAGES + - id: build-settings + env: + LANGUAGE_TAG: ${{ inputs.language }} + SPHINXOPTS_JSON: ${{ inputs.sphinxopts_json }} + run: | + python - <<'PY' + import json + import os + import shlex + + language = os.environ["LANGUAGE_TAG"] + config_sphinxopts = [token for opt in json.loads(os.environ["SPHINXOPTS_JSON"]) for token in shlex.split(opt, posix=False)] + common = [] + if language != "en": + try: + import re + with open("Doc/requirements.txt") as f: + m = re.search(r"(?i)^sphinx[=<>!~\s]*(\d+)\.(\d+)", f.read(), re.MULTILINE) + sphinx_ver = (int(m.group(1)), int(m.group(2))) if m else (99, 0) + except FileNotFoundError: + sphinx_ver = (99, 0) + common = [ + "-D", "locale_dirs=locale", + "-D", f"language={language}", + "-D", "gettext_compact=0", + "-D", "exclude_patterns=includes/*.rst,venv/*,README.rst,locale/**", + ] + if sphinx_ver >= (7, 1): + common += ["-D", "translation_progress_classes=1"] + artifact_suffix = "" if language == "en" else f"-{language}" + pdf = [*common, *config_sphinxopts] + + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as file: + file.write(f"artifact_suffix={artifact_suffix}\n") + file.write(f"common_sphinxopts={shlex.join(common)}\n") + file.write(f"pdf_sphinxopts={shlex.join(pdf)}\n") + PY - run: make venv working-directory: ./Doc - - run: make dist-text + - run: make dist-text SPHINXERRORHANDLING= SPHINXOPTS="${{ steps.build-settings.outputs.common_sphinxopts }}" working-directory: ./Doc - uses: actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs-text.zip + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs-text.zip path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs-text.zip if-no-files-found: ignore - uses: 
actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs-text.tar.bz2 + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs-text.tar.bz2 path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs-text.tar.bz2 if-no-files-found: ignore build-texinfo: @@ -124,21 +269,65 @@ jobs: with: repository: ${{ inputs.repository }} ref: ${{ inputs.reference }} + - name: Checkout translation sources + if: ${{ inputs.language != 'en' }} + uses: actions/checkout@master + with: + repository: ${{ needs.prepare.outputs.translation_repository }} + ref: ${{ needs.prepare.outputs.translation_ref }} + path: Doc/locale/${{ inputs.language }}/LC_MESSAGES + - id: build-settings + env: + LANGUAGE_TAG: ${{ inputs.language }} + SPHINXOPTS_JSON: ${{ inputs.sphinxopts_json }} + run: | + python - <<'PY' + import json + import os + import shlex + + language = os.environ["LANGUAGE_TAG"] + config_sphinxopts = [token for opt in json.loads(os.environ["SPHINXOPTS_JSON"]) for token in shlex.split(opt, posix=False)] + common = [] + if language != "en": + try: + import re + with open("Doc/requirements.txt") as f: + m = re.search(r"(?i)^sphinx[=<>!~\s]*(\d+)\.(\d+)", f.read(), re.MULTILINE) + sphinx_ver = (int(m.group(1)), int(m.group(2))) if m else (99, 0) + except FileNotFoundError: + sphinx_ver = (99, 0) + common = [ + "-D", "locale_dirs=locale", + "-D", f"language={language}", + "-D", "gettext_compact=0", + "-D", "exclude_patterns=includes/*.rst,venv/*,README.rst,locale/**", + ] + if sphinx_ver >= (7, 1): + common += ["-D", "translation_progress_classes=1"] + artifact_suffix = "" if language == "en" else f"-{language}" + pdf = [*common, *config_sphinxopts] + + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as file: + file.write(f"artifact_suffix={artifact_suffix}\n") + file.write(f"common_sphinxopts={shlex.join(common)}\n") + 
file.write(f"pdf_sphinxopts={shlex.join(pdf)}\n") + PY - run: make venv working-directory: ./Doc - run: sudo apt-get update && sudo apt-get install -y texinfo - - run: make dist-texinfo + - run: make dist-texinfo SPHINXERRORHANDLING= SPHINXOPTS="${{ steps.build-settings.outputs.common_sphinxopts }}" working-directory: ./Doc - uses: actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs-texinfo.zip + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs-texinfo.zip path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs-texinfo.zip if-no-files-found: ignore - uses: actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs-texinfo.tar.bz2 + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs-texinfo.tar.bz2 path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs-texinfo.tar.bz2 if-no-files-found: ignore build-epub: @@ -152,14 +341,58 @@ jobs: with: repository: ${{ inputs.repository }} ref: ${{ inputs.reference }} + - name: Checkout translation sources + if: ${{ inputs.language != 'en' }} + uses: actions/checkout@master + with: + repository: ${{ needs.prepare.outputs.translation_repository }} + ref: ${{ needs.prepare.outputs.translation_ref }} + path: Doc/locale/${{ inputs.language }}/LC_MESSAGES + - id: build-settings + env: + LANGUAGE_TAG: ${{ inputs.language }} + SPHINXOPTS_JSON: ${{ inputs.sphinxopts_json }} + run: | + python - <<'PY' + import json + import os + import shlex + + language = os.environ["LANGUAGE_TAG"] + config_sphinxopts = [token for opt in json.loads(os.environ["SPHINXOPTS_JSON"]) for token in shlex.split(opt, posix=False)] + common = [] + if language != "en": + try: + import re + with open("Doc/requirements.txt") as f: + m = re.search(r"(?i)^sphinx[=<>!~\s]*(\d+)\.(\d+)", f.read(), 
re.MULTILINE) + sphinx_ver = (int(m.group(1)), int(m.group(2))) if m else (99, 0) + except FileNotFoundError: + sphinx_ver = (99, 0) + common = [ + "-D", "locale_dirs=locale", + "-D", f"language={language}", + "-D", "gettext_compact=0", + "-D", "exclude_patterns=includes/*.rst,venv/*,README.rst,locale/**", + ] + if sphinx_ver >= (7, 1): + common += ["-D", "translation_progress_classes=1"] + artifact_suffix = "" if language == "en" else f"-{language}" + pdf = [*common, *config_sphinxopts] + + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as file: + file.write(f"artifact_suffix={artifact_suffix}\n") + file.write(f"common_sphinxopts={shlex.join(common)}\n") + file.write(f"pdf_sphinxopts={shlex.join(pdf)}\n") + PY - run: make venv working-directory: ./Doc - - run: make dist-epub + - run: make dist-epub SPHINXERRORHANDLING= SPHINXOPTS="${{ steps.build-settings.outputs.common_sphinxopts }}" working-directory: ./Doc - uses: actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs.epub + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs.epub path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs.epub if-no-files-found: ignore build-pdf: @@ -173,32 +406,95 @@ jobs: with: repository: ${{ inputs.repository }} ref: ${{ inputs.reference }} + - name: Checkout translation sources + if: ${{ inputs.language != 'en' }} + uses: actions/checkout@master + with: + repository: ${{ needs.prepare.outputs.translation_repository }} + ref: ${{ needs.prepare.outputs.translation_ref }} + path: Doc/locale/${{ inputs.language }}/LC_MESSAGES + - id: build-settings + env: + LANGUAGE_TAG: ${{ inputs.language }} + SPHINXOPTS_JSON: ${{ inputs.sphinxopts_json }} + run: | + python - <<'PY' + import json + import os + import shlex + + language = os.environ["LANGUAGE_TAG"] + config_sphinxopts = [token for opt in json.loads(os.environ["SPHINXOPTS_JSON"]) for 
token in shlex.split(opt, posix=False)] + common = [] + if language != "en": + try: + import re + with open("Doc/requirements.txt") as f: + m = re.search(r"(?i)^sphinx[=<>!~\s]*(\d+)\.(\d+)", f.read(), re.MULTILINE) + sphinx_ver = (int(m.group(1)), int(m.group(2))) if m else (99, 0) + except FileNotFoundError: + sphinx_ver = (99, 0) + common = [ + "-D", "locale_dirs=locale", + "-D", f"language={language}", + "-D", "gettext_compact=0", + "-D", "exclude_patterns=includes/*.rst,venv/*,README.rst,locale/**", + ] + if sphinx_ver >= (7, 1): + common += ["-D", "translation_progress_classes=1"] + artifact_suffix = "" if language == "en" else f"-{language}" + pdf = [*common, *config_sphinxopts] + + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as file: + file.write(f"artifact_suffix={artifact_suffix}\n") + file.write(f"common_sphinxopts={shlex.join(common)}\n") + file.write(f"pdf_sphinxopts={shlex.join(pdf)}\n") + PY - run: make venv working-directory: ./Doc - run: sudo apt-get update && sudo apt-get install -y latexmk texlive-xetex fonts-freefont-otf xindy librsvg2-bin - - run: make dist-pdf + - name: Install language-specific texlive packages + env: + LANGUAGE_TAG: ${{ inputs.language }} + run: | + LANGUAGE_TAG=$(echo "$LANGUAGE_TAG" | tr '_' '-' | tr '[:upper:]' '[:lower:]') + case "$LANGUAGE_TAG" in + pt-br) sudo apt-get install -y texlive-lang-portuguese ;; + zh-tw|zh-cn) sudo apt-get install -y texlive-lang-chinese ;; + ko) sudo apt-get install -y texlive-lang-korean ;; + ja) sudo apt-get install -y texlive-lang-japanese texlive-luatex fonts-noto-cjk-extra ;; + esac + - run: make dist-pdf SPHINXERRORHANDLING= SPHINXOPTS="${{ steps.build-settings.outputs.pdf_sphinxopts }}" working-directory: ./Doc + - name: Dump LaTeX log tails on failure + if: failure() + run: | + set -euxo pipefail + for f in Doc/build/latex/*.log; do + echo "===== $f (tail) =====" + tail -n 80 "$f" || true + done - uses: actions/upload-artifact@master if: always() with: - name: 
python-${{ needs.prepare.outputs.dist_version }}-pdf-logs.zip + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-pdf-logs.zip path: | ./Doc/build/latex/*.log ./Doc/build/latex/*.tex - uses: actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs-pdf-a4.zip + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs-pdf-a4.zip path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs-pdf-a4.zip if-no-files-found: ignore - uses: actions/upload-artifact@master if: always() with: - name: python-${{ needs.prepare.outputs.dist_version }}-docs-pdf-a4.tar.bz2 + name: python-${{ needs.prepare.outputs.dist_version }}${{ steps.build-settings.outputs.artifact_suffix }}-docs-pdf-a4.tar.bz2 path: ./Doc/dist/python-${{ needs.prepare.outputs.dist_version }}*-docs-pdf-a4.tar.bz2 if-no-files-found: ignore publish: - needs: [build-html, build-text, build-texinfo, build-epub, build-pdf] + needs: [prepare, build-html, build-text, build-texinfo, build-epub, build-pdf] if: ${{ !cancelled() && inputs.publish == 'true' }} runs-on: ubuntu-latest environment: @@ -216,9 +512,13 @@ jobs: path: _site continue-on-error: true + - name: Normalize language tag for publishing + id: normalize-lang + run: echo "tag=$(echo '${{ inputs.language }}' | tr '_' '-' | tr '[:upper:]' '[:lower:]')" >> "$GITHUB_OUTPUT" + - name: Prepare site directory run: | - mkdir -p _site/${{ needs.prepare.outputs.major_minor }} + mkdir -p _site/${{ steps.normalize-lang.outputs.tag }}/${{ needs.prepare.outputs.major_minor }} # Remove git metadata; safe even if checkout above did not succeed rm -rf _site/.git @@ -230,11 +530,11 @@ jobs: - name: Copy new archives into site directory run: | - # Copy generated archives (zip, tar.bz2, epub) into _site//, + # Copy generated archives (zip, tar.bz2, epub) into _site///, # excluding PDF build logs which are 
for debugging only. find artifacts/ -type f \( -name "*.zip" -o -name "*.tar.bz2" -o -name "*.epub" \) \ ! -name "python-*-pdf-logs.zip" \ - -exec cp {} _site/${{ needs.prepare.outputs.major_minor }}/ \; + -exec cp {} _site/${{ steps.normalize-lang.outputs.tag }}/${{ needs.prepare.outputs.major_minor }}/ \; - name: Generate per-version directory listing run: | @@ -247,39 +547,40 @@ jobs: root = Path("_site") version_pattern = re.compile(r"^\d+\.\d+$") - for version_dir in sorted( - p for p in root.iterdir() if p.is_dir() and version_pattern.match(p.name) - ): - files = sorted( - p for p in version_dir.iterdir() if p.is_file() and p.name != "index.html" - ) - rows = [] - for file_path in files: - stat = file_path.stat() - timestamp = datetime.fromtimestamp(stat.st_mtime, timezone.utc).strftime( - "%Y-%m-%d %H:%M:%S UTC" - ) - rows.append( - f'{escape(file_path.name)}{timestamp}{stat.st_size}' + for lang_dir in sorted(p for p in root.iterdir() if p.is_dir()): + for version_dir in sorted( + p for p in lang_dir.iterdir() if p.is_dir() and version_pattern.match(p.name) + ): + files = sorted( + p for p in version_dir.iterdir() if p.is_file() and p.name != "index.html" ) + rows = [] + for file_path in files: + stat = file_path.stat() + timestamp = datetime.fromtimestamp(stat.st_mtime, timezone.utc).strftime( + "%Y-%m-%d %H:%M:%S UTC" + ) + rows.append( + f'{escape(file_path.name)}{timestamp}{stat.st_size}' + ) - relative_path = f"/{version_dir.relative_to(root).as_posix()}/" - html = [ - "", - '', - 'Directory listing', - "", - f"

<h1>Path: {escape(relative_path)}</h1>

", - "", - '', - "", - *rows, - "", - "
<tr><th>Filename</th><th>Timestamp (UTC)</th><th>Size (bytes)</th></tr>
", - "", - "", - ] - (version_dir / "index.html").write_text("\n".join(html) + "\n", encoding="utf-8") + relative_path = f"/{version_dir.relative_to(root).as_posix()}/" + html = [ + "", + '', + 'Directory listing', + "", + f"

<h1>Path: {escape(relative_path)}</h1>

", + "", + '', + "", + *rows, + "", + "
<tr><th>Filename</th><th>Timestamp (UTC)</th><th>Size (bytes)</th></tr>
", + "", + "", + ] + (version_dir / "index.html").write_text("\n".join(html) + "\n", encoding="utf-8") PY - name: Upload Pages artifact diff --git a/.github/workflows/schedule.yaml b/.github/workflows/schedule.yaml index d3b89d1..fbb149a 100644 --- a/.github/workflows/schedule.yaml +++ b/.github/workflows/schedule.yaml @@ -23,12 +23,53 @@ jobs: outputs: versions: ${{ steps.get-versions.outputs.versions }} steps: - - name: Get supported Python versions + - name: Get supported Python versions and translations id: get-versions run: | - versions=$(curl -sf https://peps.python.org/api/release-cycle.json | \ - jq -c '[to_entries[] | select(.value.status != "end-of-life" and .value.status != "planned") | {branch: (.value.branch // .key), python_version: (if .key == "3.10" then "3.12" else "3" end)}]') - echo "versions=$versions" >> "$GITHUB_OUTPUT" + python - <<'PY' >> "$GITHUB_OUTPUT" + import json + import tomllib + from urllib.request import urlopen + + with urlopen("https://peps.python.org/api/release-cycle.json", timeout=30) as response: + release_cycle = json.load(response) + + versions = [ + { + "branch": release.get("branch") or version, + "python_version": "3.12" if version == "3.10" else "3", + } + for version, release in release_cycle.items() + if release.get("status") not in {"end-of-life", "planned"} + ] + + with urlopen( + "https://raw.githubusercontent.com/python/docsbuild-scripts/main/config.toml", + timeout=30, + ) as response: + config = tomllib.loads(response.read().decode("utf-8")) + + defaults = config.get("defaults", {}) + default_in_prod = defaults.get("in_prod", True) + default_sphinxopts = defaults.get("sphinxopts", []) + + languages = [] + for language, language_config in config.get("languages", {}).items(): + if not language_config.get("in_prod", default_in_prod): + continue + languages.append( + { + "language": language, + "sphinxopts_json": json.dumps( + language_config.get("sphinxopts", default_sphinxopts), + ensure_ascii=False, + ), + } + ) 
+ + matrix = [{**version, **language} for version in versions for language in languages] + print(f"versions={json.dumps(matrix, ensure_ascii=False)}") + PY build: needs: get-versions strategy: @@ -42,6 +83,8 @@ jobs: with: reference: ${{ matrix.branch }} python_version: ${{ matrix.python_version }} + language: ${{ matrix.language }} + sphinxopts_json: ${{ matrix.sphinxopts_json }} publish: ${{ 'false' }} deploy: @@ -79,20 +122,44 @@ jobs: - name: Copy new archives into site directory run: | - # Copy generated archives (zip, tar.bz2, epub) into _site//, + # Copy generated archives (zip, tar.bz2, epub) into _site///, # excluding PDF build logs which are for debugging only. - # Extract major.minor from filenames like python-3.14.0-docs-html.zip. - find artifacts/ -type f \( -name "*.zip" -o -name "*.tar.bz2" -o -name "*.epub" \) \ - ! -name "python-*-pdf-logs.zip" | while read -r f; do - filename=$(basename "$f") - major_minor=$(echo "$filename" | sed -n 's/^python-\([0-9]*\.[0-9]*\).*/\1/p') - mkdir -p "_site/$major_minor" - cp "$f" "_site/$major_minor/" - done + # Extract major.minor and language from filenames like: + # python-3.14.0-docs-html.zip (English zip) + # python-3.14.0-docs.epub (English epub) + # python-3.14.0-fr-docs-html.zip (French zip) + # python-3.14.0-fr-docs.epub (French epub) + python - <<'PY' + import re + import shutil + from pathlib import Path + + artifacts = Path("artifacts") + site = Path("_site") + name_re = re.compile( + r"^python-(\d+\.\d+)[\d.]*(?:-(?!docs|pdf)([a-zA-Z][a-zA-Z0-9_]*))?-(?:docs|pdf)" + ) + for f in artifacts.rglob("*"): + if not f.is_file(): + continue + name = f.name + if not (name.endswith(".zip") or name.endswith(".tar.bz2") or name.endswith(".epub")): + continue + if re.search(r"-pdf-logs\.zip$", name): + continue + m = name_re.match(name) + if not m: + continue + major_minor = m.group(1) + lang = (m.group(2) or "en").replace("_", "-").lower() + dest = site / lang / major_minor + dest.mkdir(parents=True, 
exist_ok=True) + shutil.copy2(f, dest / name) + PY - name: Symlink 3 to stable version run: | - # Create a relative symlink _site/3 -> (e.g. 3 -> 3.14), + # Create a relative symlink _site/en/3 -> (e.g. 3 -> 3.14), # pointing to the first "bugfix" (stable) version from release-cycle JSON. stable=$(curl -sf https://peps.python.org/api/release-cycle.json | \ jq -r '[to_entries[] | select(.value.status == "bugfix")] | first | .key') @@ -101,8 +168,27 @@ jobs: exit 1 fi # Remove existing 3 directory or symlink before creating new symlink - rm -rf _site/3 - ln -s "$stable" _site/3 + rm -rf _site/en/3 + ln -s "$stable" _site/en/3 + + - name: Symlink bare version paths to en/ for backwards compatibility + run: | + # Create _site/ -> en/ symlinks so that old + # URLs like /3.14/ continue to work after content moved to /en/3.14/. + python - <<'PY' + import re + from pathlib import Path + + site = Path("_site") + en_dir = site / "en" + version_pattern = re.compile(r"^\d+\.\d+$") + for version_dir in en_dir.iterdir(): + if not version_dir.is_dir() or not version_pattern.match(version_dir.name): + continue + link = site / version_dir.name + if not link.exists() and not link.is_symlink(): + link.symlink_to(Path("en") / version_dir.name) + PY - name: Generate per-version directory listing run: | @@ -115,39 +201,40 @@ jobs: root = Path("_site") version_pattern = re.compile(r"^\d+\.\d+$") - for version_dir in sorted( - p for p in root.iterdir() if p.is_dir() and version_pattern.match(p.name) - ): - files = sorted( - p for p in version_dir.iterdir() if p.is_file() and p.name != "index.html" - ) - rows = [] - for file_path in files: - stat = file_path.stat() - timestamp = datetime.fromtimestamp(stat.st_mtime, timezone.utc).strftime( - "%Y-%m-%d %H:%M:%S UTC" - ) - rows.append( - f'{escape(file_path.name)}{timestamp}{stat.st_size}' + for lang_dir in sorted(p for p in root.iterdir() if p.is_dir()): + for version_dir in sorted( + p for p in lang_dir.iterdir() if p.is_dir() and 
version_pattern.match(p.name) + ): + files = sorted( + p for p in version_dir.iterdir() if p.is_file() and p.name != "index.html" ) + rows = [] + for file_path in files: + stat = file_path.stat() + timestamp = datetime.fromtimestamp(stat.st_mtime, timezone.utc).strftime( + "%Y-%m-%d %H:%M:%S UTC" + ) + rows.append( + f'{escape(file_path.name)}{timestamp}{stat.st_size}' + ) - relative_path = f"/{version_dir.relative_to(root).as_posix()}/" - html = [ - "", - '', - 'Directory listing', - "", - f"

<h1>Path: {escape(relative_path)}</h1>

", - "", - '', - "", - *rows, - "", - "
<tr><th>Filename</th><th>Timestamp (UTC)</th><th>Size (bytes)</th></tr>
", - "", - "", - ] - (version_dir / "index.html").write_text("\n".join(html) + "\n", encoding="utf-8") + relative_path = f"/{version_dir.relative_to(root).as_posix()}/" + html = [ + "", + '', + 'Directory listing', + "", + f"

<h1>Path: {escape(relative_path)}</h1>

", + "", + '', + "", + *rows, + "", + "
<tr><th>Filename</th><th>Timestamp (UTC)</th><th>Size (bytes)</th></tr>
", + "", + "", + ] + (version_dir / "index.html").write_text("\n".join(html) + "\n", encoding="utf-8") PY - name: Upload Pages artifact diff --git a/README.md b/README.md index 2580ae6..738a820 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,34 @@ # python-docs-offline -Offline builds of Python documentation in various formats: + +Automated daily builds of Python documentation for offline use, published to +[GitHub Pages](https://m-aciek.github.io/python-docs-offline/). + +## Available formats + * PDF * Zipped offline HTML * Plain text * Texinfo * EPUB +## Supported versions and languages + +Builds are generated for all currently supported CPython versions (fetched +dynamically from the [Python release cycle](https://peps.python.org/api/release-cycle.json)) +and all languages that are active in production on [docs.python.org](https://docs.python.org) +(fetched dynamically from [docsbuild-scripts config](https://github.com/python/docsbuild-scripts/blob/main/config.toml)). + +## How it works + +A scheduled GitHub Actions workflow runs daily. It queries the CPython release +cycle API and the docsbuild-scripts configuration to determine which +(version, language) combinations to build. Each combination is built using +the [build.yaml](.github/workflows/build.yaml) reusable workflow, which checks +out the CPython source, builds the documentation with Sphinx, and uploads the +resulting archives as artifacts. After all builds complete, a single deploy job +merges the artifacts into the `gh-pages` branch and publishes them to GitHub +Pages. + ## Rejected idea I wanted to present a waiting page in place(s) of file(s) that are being generated, that would render similarly to 404 page.