# Copyright (c) typedef int GmbH, Germany, 2025. All rights reserved.
#

# IMPORTANT:
# Ubuntu: sudo apt install gir1.2-girepository-2.0-dev
# Debian: https://pkgs.org/search/?q=girepository


# -----------------------------------------------------------------------------
# -- just global configuration
# -----------------------------------------------------------------------------

# Enable unstable just features (required for `script-interpreter` below).
set unstable := true
# Pass recipe arguments through as positional parameters to recipe shells.
set positional-arguments := true
# Use `uv run --script` as the interpreter for [script] recipes.
set script-interpreter := ['uv', 'run', '--script']

# uv env vars
# see: https://docs.astral.sh/uv/reference/environment/

# project base directory = directory of this justfile
PROJECT_DIR := justfile_directory()

# Default recipe: show project header and list all recipes
default:
    #!/usr/bin/env bash
    set -e
    # Extract the package version from the first `version = "..."` line of pyproject.toml.
    VERSION=$(grep '^version' pyproject.toml | head -1 | sed 's/.*= *"\(.*\)"/\1/')
    # Short git revision; falls back to "unknown" outside a git checkout.
    GIT_REV=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")
    echo ""
    echo "==============================================================================="
    echo "                             Autobahn|Python                                   "
    echo ""
    echo "         WebSocket & WAMP for Python on Twisted and asyncio                   "
    echo ""
    echo "   Python Package:         autobahn                                           "
    echo "   Python Package Version: ${VERSION}                                         "
    echo "   Git Version:            ${GIT_REV}                                         "
    echo "   Protocol Specification: https://wamp-proto.org/                            "
    echo "   Documentation:          https://autobahn.readthedocs.io                    "
    echo "   Package Releases:       https://pypi.org/project/autobahn/                 "
    echo "   Nightly/Dev Releases:   https://github.com/crossbario/autobahn-python/releases"
    echo "   Source Code:            https://github.com/crossbario/autobahn-python      "
    echo "   Copyright:              typedef int GmbH (Germany/EU)                      "
    echo "   License:                MIT License                                        "
    echo ""
    echo "       >>>   Created by The WAMP/Autobahn/Crossbar.io OSS Project   <<<       "
    echo "==============================================================================="
    echo ""
    # Show all available recipes below the banner.
    just --list
    echo ""

# Tell uv to always copy files instead of trying to hardlink them.
# set export UV_LINK_MODE := 'copy'

# Tell uv to use project-local cache directory.
export UV_CACHE_DIR := './.uv-cache'

# Autobahn|Testsuite (https://github.com/crossbario/autobahn-testsuite) Docker image to use.
AUTOBAHN_TESTSUITE_IMAGE := 'crossbario/autobahn-testsuite:25.10.1'

# Default output directory for Autobahn|Testsuite reports (HTML files).
AUTOBAHN_TESTSUITE_OUTPUT_DIR := justfile_directory() / '.wstest'

# Default config directory for Autobahn|Testsuite configuration (JSON files).
AUTOBAHN_TESTSUITE_CONFIG_DIR := justfile_directory() / 'wstest'

# Use this common single directory for all uv venvs.
# Use absolute path (based on PROJECT_DIR) to avoid issues when cd'ing in recipes
VENV_DIR := PROJECT_DIR / '.venvs'

# Define a justfile-local variable for our environments.
# Short names map to full uv interpreter specs via the `_get-spec` helper below.
ENVS := 'cpy314 cpy313 cpy312 cpy311 pypy311'

# Internal helper to map Python version short name to full uv version
_get-spec short_name:
    #!/usr/bin/env bash
    set -e
    # Quote the interpolated value: with an empty argument the unquoted form
    # would expand to `case  in`, which is a bash syntax error instead of
    # hitting the friendly catch-all branch below.
    case "{{short_name}}" in
        cpy314)  echo "cpython-3.14";;  # cpython-3.14.0b3-linux-x86_64-gnu
        cpy313)  echo "cpython-3.13";;  # cpython-3.13.5-linux-x86_64-gnu
        cpy312)  echo "cpython-3.12";;  # cpython-3.12.11-linux-x86_64-gnu
        cpy311)  echo "cpython-3.11";;  # cpython-3.11.13-linux-x86_64-gnu
        pypy311) echo "pypy-3.11";;     # pypy-3.11.11-linux-x86_64-gnu
        *)       echo "Unknown environment: {{short_name}}" >&2; exit 1;;
    esac

# uv python install pypy-3.11-linux-aarch64-gnu --preview --verbose
# file /home/oberstet/.local/share/uv/python/pypy-3.11.11-linux-aarch64-gnu/bin/pypy3.11
# /home/oberstet/.local/share/uv/python/pypy-3.11.11-linux-aarch64-gnu/bin/pypy3.11: ELF 64-bit LSB executable, ARM aarch64, version 1 (SYSV), dynamically linked, interpreter /lib/ld-linux-aarch64.so.1, BuildID[sha1]=150f642a07dc36d3e465beaa0109e70da76ca67e, for GNU/Linux 3.7.0, stripped

# Internal helper that calculates and prints the system-matching venv name.
_get-system-venv-name:
    #!/usr/bin/env bash
    set -e
    # Derive e.g. "cpy312" from the system interpreter's "3.12".
    sys_ver=$(/usr/bin/python3 -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")
    candidate="cpy${sys_ver//./}"

    # Reject system Pythons that have no matching managed environment.
    if ! echo "{{ ENVS }}" | grep -q -w "${candidate}"; then
        echo "Error: System Python (${sys_ver}) maps to '${candidate}', which is not a supported environment in this project." >&2
        exit 1
    fi
    # The only output of this recipe is the name itself.
    echo "${candidate}"

# Helper recipe to get the python executable path for a venv
_get-venv-python venv="":
    #!/usr/bin/env bash
    set -e
    # Fall back to the venv matching the system Python when no name is given.
    name="{{ venv }}"
    [ -n "${name}" ] || name=$(just --quiet _get-system-venv-name)

    # Print the platform-specific interpreter path. Callers capture it via:
    #   VENV_PYTHON=$(just --quiet _get-venv-python "<name>")
    # and then use ${VENV_PYTHON} instead of a bare `python`.
    if [[ "$OS" == "Windows_NT" ]]; then
        echo "{{VENV_DIR}}/${name}/Scripts/python.exe"
    else
        echo "{{VENV_DIR}}/${name}/bin/python3"
    fi

# -----------------------------------------------------------------------------
# -- General/global helper recipes
# -----------------------------------------------------------------------------

# Setup bash tab completion for the current user (to activate: `source ~/.config/bash_completion`).
setup-completion:
    #!/usr/bin/env bash
    set -e

    # Honour XDG config location; fall back to ~/.config.
    COMPLETION_FILE="${XDG_CONFIG_HOME:-$HOME/.config}/bash_completion"
    MARKER="# --- Just completion ---"

    echo "==> Setting up bash tab completion for 'just'..."

    # Check if we have already configured it (idempotent: re-running is a no-op).
    if [ -f "${COMPLETION_FILE}" ] && grep -q "${MARKER}" "${COMPLETION_FILE}"; then
        echo "--> 'just' completion is already configured."
        exit 0
    fi

    echo "--> Configuration not found. Adding it now..."

    # 1. Ensure the directory exists.
    mkdir -p "$(dirname "${COMPLETION_FILE}")"

    # 2. Add our marker comment to the file.
    echo "" >> "${COMPLETION_FILE}"
    echo "${MARKER}" >> "${COMPLETION_FILE}"

    # 3. CRITICAL: Run `just` and append its raw output directly to the file.
    #    No `echo`, no `eval`, no quoting hell. Just run and redirect.
    just --completions bash >> "${COMPLETION_FILE}"

    echo "--> Successfully added completion logic to ${COMPLETION_FILE}."

    echo ""
    echo "==> Setup complete. Please restart your shell or run the following command:"
    echo "    source \"${COMPLETION_FILE}\""

# Remove ALL generated files, including venvs, caches, and build artifacts. WARNING: This is a destructive operation.
distclean:
    #!/usr/bin/env bash
    set -e

    echo "==> Performing a deep clean (distclean)..."

    # 1. Remove top-level directories known to us.
    #    This is fast for the common cases.
    echo "--> Removing venvs, cache, and build/dist directories..."
    rm -rf {{UV_CACHE_DIR}} {{VENV_DIR}} build/ dist/ wheelhouse/ .pytest_cache/ .ruff_cache/ .ty/
    rm -rf .wstest docs/_build/

    rm -f ./*.so
    rm -rf ./_trial_temp

    # 2. Use `find` to hunt down and destroy nested artifacts that can be
    #    scattered throughout the source tree. This is the most thorough part.
    #    `-prune` stops find from descending into a directory it is about to
    #    delete; without it find can error out (and abort under `set -e`)
    #    when a just-removed directory vanishes mid-traversal.
    echo "--> Searching for and removing nested Python caches..."
    find . -type d -name "__pycache__" -prune -exec rm -rf {} +

    echo "--> Searching for and removing compiled Python files..."
    find . -type f -name "*.pyc" -delete
    find . -type f -name "*.pyo" -delete

    echo "--> Searching for and removing setuptools egg-info directories..."
    find . -type d -name "*.egg-info" -prune -exec rm -rf {} +

    # Coverage data (top-level .coverage plus per-process .coverage.* files,
    # wherever they landed in the tree).
    echo "--> Searching for and removing coverage data..."
    rm -f .coverage
    find . -type f -name ".coverage.*" -delete

    echo "==> Distclean complete. The project is now pristine."

# -----------------------------------------------------------------------------
# -- Python virtual environments
# -----------------------------------------------------------------------------

# List all Python virtual environments
list-all:
    #!/usr/bin/env bash
    set -e
    echo
    echo "Available CPython run-times:"
    echo "============================"
    echo
    uv python list --all-platforms cpython
    echo
    echo "Available PyPy run-times:"
    echo "========================="
    echo
    uv python list --all-platforms pypy
    echo
    echo "Mapped Python run-time shortname => full version:"
    echo "================================================="
    echo
    # This shell loop correctly uses a SHELL variable ($env), not a just variable.
    for env in {{ENVS}}; do
        # We call our helper recipe to get the spec for the current env.
        # The `--quiet` flag is important to only capture the `echo` output.
        spec=$(just --quiet _get-spec "$env")
        echo "  - $env => $spec"
    done
    echo
    echo "Create a Python venv using: just create <shortname>"

# Create a single Python virtual environment (usage: `just create cpy314` or `just create`)
create venv="":
    #!/usr/bin/env bash
    set -e

    VENV_NAME="{{ venv }}"

    # This is the "default parameter" logic.
    # If VENV_NAME is empty (because `just create` was run), calculate the default.
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi

    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Only create the venv if it doesn't already exist
    if [ ! -d "${VENV_PATH}" ]; then
        # Get the Python spec just-in-time
        PYTHON_SPEC=$(just --quiet _get-spec "${VENV_NAME}")

        echo "==> Creating Python virtual environment '${VENV_NAME}' using ${PYTHON_SPEC} in ${VENV_PATH}..."
        mkdir -p "{{ VENV_DIR }}"
        # `--seed` pre-installs pip into the fresh venv (needed by the
        # `${VENV_PYTHON} -m pip` calls throughout this justfile).
        uv venv --seed --python "${PYTHON_SPEC}" "${VENV_PATH}"
        echo "==> Successfully created venv '${VENV_NAME}'."
    else
        echo "==> Python virtual environment '${VENV_NAME}' already exists in ${VENV_PATH}."
    fi

    # Sanity check: show interpreter and pip versions of the (new or existing) venv.
    ${VENV_PYTHON} -V
    ${VENV_PYTHON} -m pip -V

    echo "==> Activate Python virtual environment with: source ${VENV_PATH}/bin/activate"

# Meta-recipe to run `create` on all environments
create-all:
    #!/usr/bin/env bash
    # Abort on the first failing environment; previously a failure's exit
    # status was lost unless it happened in the last loop iteration.
    set -e
    for venv in {{ENVS}}; do
        just create ${venv}
    done

# Get the version of a single virtual environment's Python (usage: `just version cpy314`)
version venv="":
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"

    # This is the "default parameter" logic.
    # If VENV_NAME is empty (because `just version` was run), calculate the default.
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi

    if [ -d "{{ VENV_DIR }}/${VENV_NAME}" ]; then
        echo "==> Python virtual environment '${VENV_NAME}' exists:"
        # Use the shared helper so the interpreter path is correct on every
        # platform (bin/python3 on POSIX, Scripts/python.exe on Windows),
        # consistent with all other recipes in this justfile.
        VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
        "${VENV_PYTHON}" -V
    else
        echo "==>  Python virtual environment '${VENV_NAME}' does not exist."
    fi
    echo ""

# Get versions of all Python virtual environments
version-all:
    #!/usr/bin/env bash
    # Fail fast if any single `version` invocation fails (was missing before,
    # so errors in all but the last iteration were silently ignored).
    set -e
    for venv in {{ENVS}}; do
        just version ${venv}
    done

# Make Python packages installed by the OS package manager available in a managed venv. Usage: `just link-system-packages "" "/usr/lib/kicad-nightly/lib/python3/dist-packages"`
link-system-packages venv="" vendors="": (create venv)
    #!/usr/bin/env bash
    set -euo pipefail

    VENV_NAME="{{ venv }}"
    VENDOR_PATHS="{{ vendors }}"

    # Default-parameter logic: empty name => venv matching the system Python.
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi

    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"

    if [ ! -d "${VENV_PATH}" ] || [ ! -f "${VENV_PATH}/bin/python" ]; then
        echo "✗ Error: Virtual environment '${VENV_NAME}' not found at '${VENV_PATH}'." >&2
        exit 1
    fi
    echo "✓ Found virtual environment: ${VENV_PATH}"

    SYSTEM_PYTHON="/usr/bin/python3"
    SYSTEM_VERSION=$(${SYSTEM_PYTHON} -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")

    # Collect all relevant site-packages directories (Debian + Ubuntu quirks)
    SYSTEM_SITE_PACKAGES=$(${SYSTEM_PYTHON} -c "import sysconfig, site, os, sys; paths={sysconfig.get_path('purelib')}; [paths.add(p) for p in site.getsitepackages() if os.path.isdir(p)]; print('\n'.join(sorted(paths)))")

    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    VENV_VERSION=$(${VENV_PYTHON} -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")
    VENV_SITE_PACKAGES=$(${VENV_PYTHON} -c "import sysconfig; print(sysconfig.get_path('purelib'))")

    echo "  - System Python ${SYSTEM_VERSION} site-packages: ${SYSTEM_SITE_PACKAGES}"
    echo "  - Venv   Python ${VENV_VERSION} site-packages: ${VENV_SITE_PACKAGES}"

    # Hard error on version mismatch: linking binary packages across
    # interpreter versions risks ABI incompatibility. (Previously the
    # `exit 1` was commented out, so the error was printed and then
    # "✓ Python versions match." followed unconditionally.)
    if [ "${VENV_VERSION}" != "${SYSTEM_VERSION}" ]; then
        echo "✗ Error: Python version mismatch!" >&2
        echo "  System is ${SYSTEM_VERSION}, but venv is ${VENV_VERSION}." >&2
        echo "  Cannot link system packages due to risk of binary incompatibility." >&2
        exit 1
    fi
    echo "✓ Python versions match."

    # Write a .pth file into the venv; paths listed there are appended to
    # sys.path by the site module at interpreter startup.
    PTH_FILE="${VENV_SITE_PACKAGES}/__system_packages.pth"
    echo "==> Writing link file: ${PTH_FILE}"

    {
        # system paths (multi-line safe)
        while IFS= read -r sp; do
            if [ -d "$sp" ]; then
                echo "$sp"
            else
                echo "⚠ Warning: system path not found: $sp" >&2
            fi
        done <<< "${SYSTEM_SITE_PACKAGES}"

        # vendor paths (comma/space separated)
        if [ -n "${VENDOR_PATHS}" ]; then
            IFS=', ' read -ra VENDOR_ARRAY <<< "${VENDOR_PATHS}"
            for vp in "${VENDOR_ARRAY[@]}"; do
                if [ -d "${vp}" ]; then
                    echo "${vp}"
                else
                    echo "⚠ Warning: vendor path not found: ${vp}" >&2
                fi
            done
        fi
    } > "${PTH_FILE}"

    echo "✓ Done."
    echo
    echo "Linked paths in $(basename "${PTH_FILE}"):"
    echo
    cat "${PTH_FILE}"
    echo

# -----------------------------------------------------------------------------
# -- Installation and Test
# -----------------------------------------------------------------------------

# Install this package and its run-time dependencies in a single environment (usage: `just install cpy314` or `just install`)
install venv="": (create venv)
    #!/usr/bin/env bash
    set -e
    # Resolve the target venv; an empty argument means "match system Python".
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    # Install the package with all optional runtime extras into that venv.
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Installing package with package runtime dependencies in ${VENV_NAME}..."
    "${VENV_PYTHON}" -m pip install .[all]

# Install this package in development (editable) mode and its run-time dependencies in a single environment (usage: `just install-dev cpy314` or `just install-dev`)
install-dev venv="": (create venv)
    #!/usr/bin/env bash
    set -e
    # Resolve the target venv; an empty argument means "match system Python".
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    # Editable install (-e) so source edits are picked up without reinstalling.
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Installing package - in editable mode - with package runtime dependencies in ${VENV_NAME}..."
    "${VENV_PYTHON}" -m pip install -e .[all]

# Install with locally editable WAMP packages for cross-repo development (usage: `just install-dev-local cpy312`)
install-dev-local venv="": (create venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    echo "==> Installing WAMP packages in editable mode from local repos..."
    echo "==> Looking for sibling repos (../txaio)..."

    # Install local WAMP packages in editable mode
    # txaio - no extras needed (currently txaio is the only sibling checked)
    if [ -d "../txaio" ]; then
        echo "  ✓ Installing txaio from ../txaio"
        ${VENV_PYTHON} -m pip install -e "../txaio"
    else
        echo "  ⚠ Warning: ../txaio not found, skipping"
    fi

    # `only-if-needed` keeps the locally installed editable txaio instead of
    # replacing it with a PyPI release during the upgrade pass.
    echo "==> Installing autobahn in editable mode with [all,dev] extras..."
    ${VENV_PYTHON} -m pip install -e .[all,dev] --upgrade --upgrade-strategy only-if-needed

# Build NVX (Native Vector Extensions) CFFI modules for development/editable installs
# This is needed because hatchling's build hook only compiles during wheel builds.
# For editable installs in CI or local development, we need to compile manually.
#
# The .so files are placed in src/ (not src/autobahn/nvx/) because:
# - CFFI names the modules as top-level modules (e.g., "_nvx_utf8validator")
# - Python imports look for top-level modules in sys.path roots
# - For editable installs, src/ is on sys.path, so .so files there are importable
#
# (usage: `just build-nvx cpy314` or with explicit NVX: `AUTOBAHN_USE_NVX=1 just build-nvx cpy314`)
build-nvx venv="": (create venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    echo "==> Building NVX CFFI extension modules in ${VENV_NAME}..."

    # Ensure cffi is available
    ${VENV_PYTHON} -m pip install "cffi>=2.0.0" --quiet

    # Build modules in src/ directory (for editable install sys.path compatibility)
    # The CFFI ffi.compile() writes output to current directory
    # VENV_DIR and PROJECT_DIR are absolute paths, so VENV_PATH is absolute too
    SRC_DIR="{{ PROJECT_DIR }}/src"
    NVX_DIR="{{ PROJECT_DIR }}/src/autobahn/nvx"

    # Run each builder from within src/ (subshell, so the cwd change is local).
    echo "    Building _nvx_utf8validator..."
    (cd "${SRC_DIR}" && ${VENV_PYTHON} "${NVX_DIR}/_utf8validator.py")

    echo "    Building _nvx_xormasker..."
    (cd "${SRC_DIR}" && ${VENV_PYTHON} "${NVX_DIR}/_xormasker.py")

    # List built artifacts (non-fatal if none were produced, but say so).
    echo "==> Built NVX artifacts:"
    ls -la "${SRC_DIR}"/_nvx_*.so 2>/dev/null || echo "    (no .so files found - check for errors above)"

    echo "==> NVX build complete for ${VENV_NAME}."

# Meta-recipe to run `install` on all environments
install-all:
    #!/usr/bin/env bash
    set -e
    # Install sequentially into every managed environment; stop on first failure.
    for env_name in {{ENVS}}; do
        just install "${env_name}"
    done

# Meta-recipe to run `install-dev` on all environments
install-dev-all:
    #!/usr/bin/env bash
    # Abort on the first failing environment (was missing, so failures in all
    # but the last iteration were silently swallowed).
    set -e
    for venv in {{ENVS}}; do
        just install-dev ${venv}
    done

# Upgrade dependencies in a single environment (usage: `just upgrade cpy314`)
upgrade venv="": (create venv)
    #!/usr/bin/env bash
    set -e
    # Resolve the target venv; an empty argument means "match system Python".
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Upgrading dependencies in ${VENV_NAME}..."
    # Upgrade pip itself first, then the package (editable) with all extras.
    "${VENV_PYTHON}" -m pip install --upgrade pip
    "${VENV_PYTHON}" -m pip install --upgrade -e .[all,dev]
    echo "==> Dependencies upgraded in ${VENV_NAME}."

# Meta-recipe to run `upgrade` on all environments
upgrade-all:
    #!/usr/bin/env bash
    set -e
    # Upgrade every managed environment in turn; stop on first failure.
    for env_name in {{ENVS}}; do
        just upgrade "${env_name}"
    done

# -----------------------------------------------------------------------------
# -- Installation: Tools (Ruff, Sphinx, etc)
# -----------------------------------------------------------------------------

# Install minimal build tools for building wheels (usage: `just install-build-tools cpy314`)
# This is lighter than install-tools as it excludes dependencies like twine
# (which depends on nh3, a Rust package that segfaults under QEMU ARM64 emulation)
install-build-tools venv="": (create venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Installing minimal build tools in ${VENV_NAME}..."

    # Show interpreter and pip versions for the build log.
    ${VENV_PYTHON} -V
    ${VENV_PYTHON} -m pip -V

    # The `build-tools` extra is defined in pyproject.toml.
    ${VENV_PYTHON} -m pip install -e .[build-tools]

# Install the development tools for this Package in a single environment (usage: `just install-tools cpy314`)
# This also builds NVX CFFI modules so that tests with AUTOBAHN_USE_NVX=1 work.
install-tools venv="": (create venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Installing package development tools in ${VENV_NAME}..."

    # Show interpreter and pip versions for the build log.
    ${VENV_PYTHON} -V
    ${VENV_PYTHON} -m pip -V

    # uv pip install --python "{{VENV_DIR}}/${VENV_NAME}/bin/python" -e .[dev]
    ${VENV_PYTHON} -m pip install -e .[dev]

    # Build NVX CFFI modules for editable installs (needed for tests with AUTOBAHN_USE_NVX=1)
    just build-nvx ${VENV_NAME}

# Meta-recipe to run `install-tools` on all environments
install-tools-all:
    #!/usr/bin/env bash
    set -e
    # Install dev tooling into every managed environment; stop on first failure.
    for env_name in {{ENVS}}; do
        just install-tools "${env_name}"
    done

# Install benchmark dependencies (Python 3.11+ only - vmprof requires binary wheels)
install-benchmark venv="": (create venv) (install venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Check Python version is 3.11+
    # NOTE(review): all entries in ENVS are currently 3.11+, so this guard
    # only triggers for an auto-detected older system Python - confirm.
    PY_VERSION=$(${VENV_PYTHON} -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")
    PY_MAJOR=$(echo ${PY_VERSION} | cut -d. -f1)
    PY_MINOR=$(echo ${PY_VERSION} | cut -d. -f2)

    if [ "${PY_MAJOR}" -lt 3 ] || ([ "${PY_MAJOR}" -eq 3 ] && [ "${PY_MINOR}" -lt 11 ]); then
        echo "❌ ERROR: Benchmarking requires Python 3.11+ (vmprof binary wheels)"
        echo "   Current venv '${VENV_NAME}' has Python ${PY_VERSION}"
        echo ""
        echo "Supported venvs for benchmarking:"
        echo "  - cpy311 (CPython 3.11)"
        echo "  - cpy312 (CPython 3.12)"
        echo "  - cpy313 (CPython 3.13)"
        echo "  - cpy314 (CPython 3.14)"
        echo "  - pypy311 (PyPy 3.11)"
        exit 1
    fi

    echo "==> Installing benchmark dependencies in ${VENV_NAME} (Python ${PY_VERSION})..."
    ${VENV_PYTHON} -V
    ${VENV_PYTHON} -m pip -V
    ${VENV_PYTHON} -m pip install -e .[benchmark]

# Install Rust (rustc & cargo) from upstream via rustup.
install-rust:
    #!/usr/bin/env bash
    set -e
    # Fetch and run the official rustup installer over TLS 1.2+ only.
    curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
    # Source cargo's env for this shell, then report the installed toolchain.
    . "$HOME/.cargo/env"
    which rustc
    rustc --version
    which cargo
    cargo --version

# Install Autobahn WebSocket Testsuite (Docker image).
install-wstest:
    #!/usr/bin/env bash
    set -e
    # Pull the pinned testsuite image (requires Docker and sudo rights).
    sudo docker pull "{{AUTOBAHN_TESTSUITE_IMAGE}}"

# -----------------------------------------------------------------------------
# -- Linting, Static Typechecking, .. the codebase
# -----------------------------------------------------------------------------

# Automatically fix all formatting and code style issues.
fix-format venv="": (install-tools venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"

    echo "==> Automatically formatting code with ${VENV_NAME}..."

    # 1. Run the FORMATTER first. This will handle line lengths, quotes, etc.
    #    Uses exclude list from pyproject.toml (includes autobahn/wamp/gen/*, tests, etc.)
    "${VENV_PATH}/bin/ruff" format .

    # 2. Run the LINTER'S FIXER second. This will handle things like
    #    removing unused imports, sorting __all__, etc.
    #    Uses exclude list from pyproject.toml (includes autobahn/wamp/gen/*, tests, etc.)
    #    Order matters: the fixer runs on already-formatted code.
    "${VENV_PATH}/bin/ruff" check --fix .
    echo "--> Formatting complete."

# Alias for fix-format (kept for backward compatibility; simply delegates
# the optional venv argument to `fix-format`).
autoformat venv="": (fix-format venv)

# Lint code using Ruff in a single environment
check-format venv="": (install-tools venv)
    #!/usr/bin/env bash
    set -e
    # Resolve the target venv; an empty argument means "match system Python".
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    echo "==> Linting code with ${VENV_NAME}..."
    # Check only (no --fix); ruff reads its config from pyproject.toml.
    "{{ VENV_DIR }}/${VENV_NAME}/bin/ruff" check .

# Run static type checking with ty (Astral's Rust-based type checker)
# FIXME: Many type errors need to be fixed. For now, we ignore most rules
# to get CI passing. Create follow-up issue to address type errors.
check-typing venv="": (install venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    echo "==> Running static type checks with ty (using ${VENV_NAME} for type stubs)..."
    # Note: Only check src/autobahn/, not src/flatbuffers/ (generated code)
    # FIXME: Many ignores needed until type annotations are fixed
    # NOTE(review): `ty` is invoked from PATH (not from the venv); the venv
    # only supplies the interpreter environment via --python - confirm ty is
    # installed globally wherever this runs.
    ty check \
        --python "${VENV_PATH}/bin/python" \
        --ignore unresolved-import \
        --ignore unresolved-attribute \
        --ignore unresolved-reference \
        --ignore possibly-missing-attribute \
        --ignore possibly-missing-import \
        --ignore call-non-callable \
        --ignore invalid-assignment \
        --ignore invalid-argument-type \
        --ignore invalid-return-type \
        --ignore invalid-method-override \
        --ignore invalid-type-form \
        --ignore unsupported-operator \
        --ignore too-many-positional-arguments \
        --ignore unknown-argument \
        --ignore non-subscriptable \
        --ignore not-iterable \
        --ignore no-matching-overload \
        --ignore conflicting-declarations \
        --ignore deprecated \
        src/autobahn/

# Run coverage for Twisted tests only
check-coverage-twisted venv="" use_nvx="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Handle NVX configuration: "1" forces the native extension on,
    # "0" forces it off, anything else leaves auto-detection to the package.
    USE_NVX="{{ use_nvx }}"
    if [ "${USE_NVX}" = "1" ]; then
        export AUTOBAHN_USE_NVX=1
        echo "==> Running Twisted tests with coverage in ${VENV_NAME} (WITH NVX)..."
    elif [ "${USE_NVX}" = "0" ]; then
        export AUTOBAHN_USE_NVX=0
        echo "==> Running Twisted tests with coverage in ${VENV_NAME} (WITHOUT NVX)..."
    else
        echo "==> Running Twisted tests with coverage in ${VENV_NAME} (AUTO NVX)..."
    fi

    # Clean previous coverage data
    rm -f .coverage .coverage.*

    # Run Twisted tests with coverage (--parallel-mode writes .coverage.* files
    # so `check-coverage-combined` can merge them with the asyncio run).
    # NOTE(review): --source=autobahn here vs --source=src/autobahn in the
    # asyncio recipe - confirm both produce matching paths for `coverage combine`.
    USE_TWISTED=1 "${VENV_PATH}/bin/coverage" run \
        --source=autobahn \
        --parallel-mode \
        -m twisted.trial --no-recurse \
        autobahn.test \
        autobahn.twisted.test \
        autobahn.websocket.test \
        autobahn.rawsocket.test \
        autobahn.wamp.test \
        autobahn.nvx.test

# Run coverage for asyncio tests only
check-coverage-asyncio venv="" use_nvx="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"

    # Handle NVX configuration: "1" forces the native extension on,
    # "0" forces it off, anything else leaves auto-detection to the package.
    USE_NVX="{{ use_nvx }}"
    if [ "${USE_NVX}" = "1" ]; then
        export AUTOBAHN_USE_NVX=1
        echo "==> Running asyncio tests with coverage in ${VENV_NAME} (WITH NVX)..."
    elif [ "${USE_NVX}" = "0" ]; then
        export AUTOBAHN_USE_NVX=0
        echo "==> Running asyncio tests with coverage in ${VENV_NAME} (WITHOUT NVX)..."
    else
        echo "==> Running asyncio tests with coverage in ${VENV_NAME} (AUTO NVX)..."
    fi

    # Run asyncio tests with coverage (parallel mode to combine later)
    # NOTE(review): --source=src/autobahn here vs --source=autobahn in the
    # Twisted recipe - confirm both produce matching paths for `coverage combine`.
    USE_ASYNCIO=1 "${VENV_PATH}/bin/coverage" run \
        --source=src/autobahn \
        --parallel-mode \
        -m pytest -s -v -rfP \
        --ignore=./src/autobahn/twisted ./src/autobahn

# Combined coverage report from both Twisted and asyncio tests
check-coverage-combined venv="" use_nvx="": (check-coverage-twisted venv use_nvx) (check-coverage-asyncio venv use_nvx)
    #!/usr/bin/env bash
    set -e
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"

    # Determine NVX suffix for report naming (mirrors the flag handling in
    # the two prerequisite coverage recipes).
    USE_NVX="{{ use_nvx }}"
    if [ "${USE_NVX}" = "1" ]; then
        NVX_SUFFIX="-with-nvx"
        echo "==> Combining coverage data from Twisted and asyncio tests (WITH NVX)..."
    elif [ "${USE_NVX}" = "0" ]; then
        NVX_SUFFIX="-without-nvx"
        echo "==> Combining coverage data from Twisted and asyncio tests (WITHOUT NVX)..."
    else
        NVX_SUFFIX=""
        echo "==> Combining coverage data from Twisted and asyncio tests (AUTO NVX)..."
    fi

    # Combine all .coverage.* files written by the parallel-mode runs above.
    "${VENV_PATH}/bin/coverage" combine

    # Generate reports with NVX-specific naming
    mkdir -p docs/_build/html
    "${VENV_PATH}/bin/coverage" html -d docs/_build/html/coverage-combined${NVX_SUFFIX}
    "${VENV_PATH}/bin/coverage" report --show-missing

    echo ""
    echo "✅ Combined coverage report generated:"
    echo "   HTML: docs/_build/html/coverage-combined${NVX_SUFFIX}/index.html"
    echo "   Text: above summary"

# Legacy coverage recipe (DEPRECATED - use check-coverage-combined instead)
check-coverage venv="" use_nvx="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    echo "⚠️  DEPRECATED: Use 'just check-coverage-combined' for comprehensive coverage"
    echo "⚠️  This recipe only runs pytest coverage and misses Twisted-specific code paths"
    # Resolve the target venv: use the given name, or derive one from the
    # system Python when the argument was left empty.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"

    # Handle NVX configuration (environment toggle + report directory suffix)
    USE_NVX="{{ use_nvx }}"
    if [ "${USE_NVX}" = "1" ]; then
        export AUTOBAHN_USE_NVX=1
        NVX_SUFFIX="-with-nvx"
        echo "==> Running tests with coverage with ${VENV_NAME} (WITH NVX)..."
    elif [ "${USE_NVX}" = "0" ]; then
        export AUTOBAHN_USE_NVX=0
        NVX_SUFFIX="-without-nvx"
        echo "==> Running tests with coverage with ${VENV_NAME} (WITHOUT NVX)..."
    else
        NVX_SUFFIX=""
        echo "==> Running tests with coverage with ${VENV_NAME} (AUTO NVX)..."
    fi

    mkdir -p docs/_build/html
    # NOTE(review): a previous comment here claimed non-zero exit codes are
    # ignored "by prefixing with hyphen", but no such prefix is present and,
    # with `set -e` above, a failing pytest run aborts this recipe — confirm
    # which behavior is actually intended.
    "${VENV_PATH}/bin/pytest" \
        --cov=autobahn \
        --cov-report=html:docs/_build/html/coverage${NVX_SUFFIX}

    echo "--> Coverage report generated in docs/_build/html/coverage${NVX_SUFFIX}/index.html"

# Verify all WebSocket compression methods are available (usage: `just check-compressors cpy314 "permessage-deflate, permessage-brotli"`)
check-compressors venv="" expect="permessage-brotli,permessage-bzip2,permessage-deflate,permessage-snappy": (install venv)
    #!/usr/bin/env bash
    set -e
    # Resolve the target venv: use the given name, or derive one from the
    # system Python when the argument was left empty.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    EXPECT_LIST="{{ expect }}"

    echo "==> Checking WebSocket compression methods in ${VENV_NAME}..."
    # Use mktemp (unpredictable name) instead of the previous fixed
    # /tmp/..._$$.py pattern, and remove the file via an EXIT trap so it is
    # cleaned up even when the Python probe below fails under `set -e`
    # (previously the temp file leaked on failure).
    TMP_SCRIPT=$(mktemp /tmp/check_compressors_XXXXXX.py)
    trap 'rm -f "${TMP_SCRIPT}"' EXIT
    # Generate a small probe script: prints a human-readable report of the
    # available permessage compression extensions, plus a machine-readable
    # "ACTUAL_LIST:" line consumed by the validation below.
    {
        echo "import sys"
        echo "from autobahn.websocket.compress import PERMESSAGE_COMPRESSION_EXTENSION"
        echo ""
        echo "available = sorted(PERMESSAGE_COMPRESSION_EXTENSION.keys())"
        echo ""
        echo "print('Available WebSocket Compression Methods:')"
        echo "print('=' * 70)"
        echo "for ext_name in available:"
        echo "    ext_classes = PERMESSAGE_COMPRESSION_EXTENSION[ext_name]"
        echo "    pmce_class = ext_classes.get('PMCE')"
        echo "    if pmce_class:"
        echo "        class_ref = f\"{pmce_class.__module__}.{pmce_class.__name__}\""
        echo "        print(f'  {ext_name:25s} -> {class_ref}')"
        echo "    else:"
        echo "        print(f'  {ext_name:25s} -> (no PMCE class found)')"
        echo "print('=' * 70)"
        echo "print(f'Total: {len(available)} compression methods available')"
        echo ""
        echo "# Output list for bash validation"
        echo "print('ACTUAL_LIST:' + ','.join(available))"
    } > "${TMP_SCRIPT}"
    OUTPUT=$("${VENV_PATH}/bin/python" "${TMP_SCRIPT}")
    # Show the report, hide the machine-readable marker line.
    echo "${OUTPUT}" | grep -v "^ACTUAL_LIST:"

    if [ -n "${EXPECT_LIST}" ]; then
        echo ""
        echo "==> Validating against expected list..."
        ACTUAL=$(echo "${OUTPUT}" | grep "^ACTUAL_LIST:" | cut -d: -f2)

        # Convert comma-separated strings to sorted arrays
        IFS=',' read -ra EXPECTED_ARRAY <<< "${EXPECT_LIST}"
        IFS=',' read -ra ACTUAL_ARRAY <<< "${ACTUAL}"

        # Trim whitespace and sort
        EXPECTED_SORTED=($(for item in "${EXPECTED_ARRAY[@]}"; do echo "${item}" | xargs; done | sort))
        ACTUAL_SORTED=($(for item in "${ACTUAL_ARRAY[@]}"; do echo "${item}" | xargs; done | sort))

        # Compare arrays
        if [ "${EXPECTED_SORTED[*]}" != "${ACTUAL_SORTED[*]}" ]; then
            echo "❌ ERROR: Compression methods mismatch!"
            echo ""
            echo "Expected: ${EXPECTED_SORTED[*]}"
            echo "Actual:   ${ACTUAL_SORTED[*]}"
            echo ""
            # Show differences
            echo "Missing:  $(comm -23 <(printf '%s\n' "${EXPECTED_SORTED[@]}") <(printf '%s\n' "${ACTUAL_SORTED[@]}") | tr '\n' ' ')"
            echo "Extra:    $(comm -13 <(printf '%s\n' "${EXPECTED_SORTED[@]}") <(printf '%s\n' "${ACTUAL_SORTED[@]}") | tr '\n' ' ')"
            exit 1
        else
            echo "✅ Compression methods match expected list (${EXPECTED_SORTED[*]})"
        fi
    else
        echo "✅ Compression methods check completed"
    fi

# Verify all WAMP serializers are available (usage: `just check-serializers cpy314 "json, msgpack, cbor, ubjson, flatbuffers"`)
check-serializers venv="" expect="cbor,flatbuffers,json,msgpack,ubjson": (install venv)
    #!/usr/bin/env bash
    set -e
    # Resolve the target venv: use the given name, or derive one from the
    # system Python when the argument was left empty.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    EXPECT_LIST="{{ expect }}"

    echo "==> Checking WAMP serializers in ${VENV_NAME}..."
    # Use mktemp (unpredictable name) instead of the previous fixed
    # /tmp/..._$$.py pattern, and remove the file via an EXIT trap so it is
    # cleaned up even when the Python probe below fails under `set -e`
    # (previously the temp file leaked on failure).
    TMP_SCRIPT=$(mktemp /tmp/check_serializers_XXXXXX.py)
    trap 'rm -f "${TMP_SCRIPT}"' EXIT
    # Generate a small probe script: prints a human-readable report of the
    # registered WAMP serializers, plus a machine-readable "ACTUAL_LIST:"
    # line consumed by the validation below.
    {
        echo "import sys"
        echo "from autobahn.wamp.serializer import SERID_TO_OBJSER"
        echo ""
        echo "available = sorted(SERID_TO_OBJSER.keys())"
        echo ""
        echo "print('Available WAMP Serializers:')"
        echo "print('=' * 70)"
        echo "for ser_name in available:"
        echo "    ser_class = SERID_TO_OBJSER[ser_name]"
        echo "    class_ref = f\"{ser_class.__module__}.{ser_class.__name__}\""
        echo "    print(f'  {ser_name:25s} -> {class_ref}')"
        echo "print('=' * 70)"
        echo "print(f'Total: {len(available)} serializers available')"
        echo ""
        echo "# Output list for bash validation"
        echo "print('ACTUAL_LIST:' + ','.join(available))"
    } > "${TMP_SCRIPT}"
    OUTPUT=$("${VENV_PATH}/bin/python" "${TMP_SCRIPT}")
    # Show the report, hide the machine-readable marker line.
    echo "${OUTPUT}" | grep -v "^ACTUAL_LIST:"

    if [ -n "${EXPECT_LIST}" ]; then
        echo ""
        echo "==> Validating against expected list..."
        ACTUAL=$(echo "${OUTPUT}" | grep "^ACTUAL_LIST:" | cut -d: -f2)

        # Convert comma-separated strings to sorted arrays
        IFS=',' read -ra EXPECTED_ARRAY <<< "${EXPECT_LIST}"
        IFS=',' read -ra ACTUAL_ARRAY <<< "${ACTUAL}"

        # Trim whitespace and sort
        EXPECTED_SORTED=($(for item in "${EXPECTED_ARRAY[@]}"; do echo "${item}" | xargs; done | sort))
        ACTUAL_SORTED=($(for item in "${ACTUAL_ARRAY[@]}"; do echo "${item}" | xargs; done | sort))

        # Compare arrays
        if [ "${EXPECTED_SORTED[*]}" != "${ACTUAL_SORTED[*]}" ]; then
            echo "❌ ERROR: WAMP serializers mismatch!"
            echo ""
            echo "Expected: ${EXPECTED_SORTED[*]}"
            echo "Actual:   ${ACTUAL_SORTED[*]}"
            echo ""
            # Show differences
            echo "Missing:  $(comm -23 <(printf '%s\n' "${EXPECTED_SORTED[@]}") <(printf '%s\n' "${ACTUAL_SORTED[@]}") | tr '\n' ' ')"
            echo "Extra:    $(comm -13 <(printf '%s\n' "${EXPECTED_SORTED[@]}") <(printf '%s\n' "${ACTUAL_SORTED[@]}") | tr '\n' ' ')"
            exit 1
        else
            echo "✅ WAMP serializers match expected list (${EXPECTED_SORTED[*]})"
        fi
    else
        echo "✅ Serializers check completed"
    fi

# Aggregate QA gate: compressor + serializer availability, formatting,
# typing, and the combined Twisted+asyncio coverage run (NVX left on auto).
# Run all checks in single environment (usage: `just check cpy314`)
check venv="": (check-compressors venv) (check-serializers venv) (check-format venv) (check-typing venv) (check-coverage-combined venv)

# -----------------------------------------------------------------------------
# -- Unit tests
# -----------------------------------------------------------------------------

# Runs both reactor flavors; use_nvx: "1" forces NVX on, "0" off, "" auto.
# Run the test suite for Twisted/trial and asyncio/pytest (usage: `just test cpy314`)
test venv="" use_nvx="": (test-twisted venv use_nvx) (test-asyncio venv use_nvx)

# Meta-recipe to run `test` on all environments
test-all:
    #!/usr/bin/env bash
    # Fail fast: abort as soon as one environment's test run fails.
    set -e
    for env_name in {{ENVS}}; do
        just test "${env_name}"
    done

# Run basic autobahn library import test (usage: `just test-import cpy314`)
test-import venv="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Import a WAMP message class and print its message type code — a quick
    # smoke check that the package is importable in this venv at all.
    ${VENV_PYTHON} -c "from autobahn.wamp.message import Unregistered; print(f'\n{Unregistered.MESSAGE_TYPE}! ohh, yeah.\n')"

# Run the test suite for Twisted using trial (usage: `just test-twisted cpy314`)
test-twisted venv="" use_nvx="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Handle NVX configuration:
    #   "1" -> force the NVX native extension on, "0" -> force it off,
    #   anything else -> leave AUTOBAHN_USE_NVX unset (auto-detect).
    USE_NVX="{{ use_nvx }}"
    if [ "${USE_NVX}" = "1" ]; then
        export AUTOBAHN_USE_NVX=1
        echo "==> Running test suite for Twisted using trial in ${VENV_NAME} (WITH NVX)..."
    elif [ "${USE_NVX}" = "0" ]; then
        export AUTOBAHN_USE_NVX=0
        echo "==> Running test suite for Twisted using trial in ${VENV_NAME} (WITHOUT NVX)..."
    else
        echo "==> Running test suite for Twisted using trial in ${VENV_NAME} (AUTO NVX)..."
    fi

    # IMPORTANT: Twisted trial doesn't allow to recurse-and-exclude, and hence we
    # need this looong explicit list of tests to run because we must exclude "asyncio"
    #
    # AUTOBAHN_CI_ENABLE_RNG_DEPLETION_TESTS=1:
    #   This enables "autobahn/test/test_rng.py" (on Linux), which tests entropy depletion,
    #   and tests how to correctly read _real_ entropy and block if not enough _real_ entropy
    #   is currently available (see: https://github.com/crossbario/autobahn-python/issues/1275)

    # USE_TWISTED=1 presumably selects the Twisted flavor inside the test
    # suite — confirm against the test setup code.
    USE_TWISTED=1 ${VENV_PYTHON} -m twisted.trial --no-recurse \
        autobahn.test \
        autobahn.twisted.test \
        autobahn.websocket.test \
        autobahn.rawsocket.test \
        autobahn.wamp.test \
        autobahn.nvx.test

# Run the test suite for asyncio using pytest (usage: `just test-asyncio cpy314`)
test-asyncio venv="" use_nvx="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Handle NVX configuration:
    #   "1" -> force the NVX native extension on, "0" -> force it off,
    #   anything else -> leave AUTOBAHN_USE_NVX unset (auto-detect).
    USE_NVX="{{ use_nvx }}"
    if [ "${USE_NVX}" = "1" ]; then
        export AUTOBAHN_USE_NVX=1
        echo "==> Running test suite for asyncio using pytest in ${VENV_NAME} (WITH NVX)..."
    elif [ "${USE_NVX}" = "0" ]; then
        export AUTOBAHN_USE_NVX=0
        echo "==> Running test suite for asyncio using pytest in ${VENV_NAME} (WITHOUT NVX)..."
    else
        echo "==> Running test suite for asyncio using pytest in ${VENV_NAME} (AUTO NVX)..."
    fi

    # IMPORTANT: we need to exclude all twisted tests
    USE_ASYNCIO=1 ${VENV_PYTHON} -m pytest -s -v -rfP \
        --ignore=./src/autobahn/twisted ./src/autobahn

# Run WAMP message serdes conformance tests (usage: `just test-serdes cpy311`)
test-serdes venv="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    echo "==> Running WAMP message serdes conformance tests in ${VENV_NAME}..."
    echo "==> Test vectors loaded from: wamp-proto/testsuite/"
    # One test module per WAMP message type, listed explicitly.
    ${VENV_PYTHON} -m pytest -v \
        examples/serdes/tests/test_publish.py \
        examples/serdes/tests/test_event.py \
        examples/serdes/tests/test_subscribe.py \
        examples/serdes/tests/test_subscribed.py \
        examples/serdes/tests/test_published.py \
        examples/serdes/tests/test_unsubscribe.py \
        examples/serdes/tests/test_unsubscribed.py \
        examples/serdes/tests/test_call.py \
        examples/serdes/tests/test_result.py \
        examples/serdes/tests/test_register.py \
        examples/serdes/tests/test_registered.py \
        examples/serdes/tests/test_unregister.py \
        examples/serdes/tests/test_unregistered.py \
        examples/serdes/tests/test_invocation.py \
        examples/serdes/tests/test_yield.py \
        examples/serdes/tests/test_error.py \
        examples/serdes/tests/test_hello.py \
        examples/serdes/tests/test_welcome.py \
        examples/serdes/tests/test_abort.py \
        examples/serdes/tests/test_challenge.py \
        examples/serdes/tests/test_authenticate.py \
        examples/serdes/tests/test_goodbye.py \
        examples/serdes/tests/test_cancel.py \
        examples/serdes/tests/test_interrupt.py \
        examples/serdes/tests/test_eventreceived.py

# -----------------------------------------------------------------------------
# -- Smoke tests (package verification)
# -----------------------------------------------------------------------------

# Run smoke tests on an installed autobahn package (verifies FlatBuffers work)
# This is used by test-wheel-install and test-sdist-install after installation
test-smoke venv="":
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit argument, else derived from system Python.
    SELECTED_VENV="{{ venv }}"
    [ -n "${SELECTED_VENV}" ] || SELECTED_VENV=$(just --quiet _get-system-venv-name)
    PYTHON_BIN=$(just --quiet _get-venv-python "${SELECTED_VENV}")
    SELECTED_PATH="{{ VENV_DIR }}/${SELECTED_VENV}"

    echo "Running smoke tests with Python: $(${PYTHON_BIN} --version)"
    echo "Venv: ${SELECTED_PATH}"
    echo ""

    # Delegate the actual checks to the Python smoke-test script.
    ${PYTHON_BIN} "{{ PROJECT_DIR }}/scripts/smoke_test.py"

# Test bundled flatc compiler and Python API
test-bundled-flatc venv="": (install venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        VENV_NAME=$(just --quiet _get-system-venv-name)
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Testing bundled flatc compiler in ${VENV_NAME}..."
    echo ""

    # BUGFIX: each check now runs its command inside `if ...; then` (or with
    # `|| true`) so that a failure reaches the FAIL branch. Previously the
    # pattern was `cmd` followed by `if [ $? -eq 0 ]`, but with `set -e` a
    # failing command aborted the script first, making every FAIL branch
    # (and its `exit 1`) unreachable.

    # Test 1: flatc console script works
    echo "Test 1: Verifying 'flatc --version' works via console script..."
    if FLATC_VERSION=$("${VENV_PATH}/bin/flatc" --version 2>&1); then
        echo "  PASS: flatc console script works"
        echo "  Version: ${FLATC_VERSION}"
    else
        echo "  FAIL: flatc console script failed"
        exit 1
    fi
    echo ""

    # Test 2: Python API get_flatc_path() works
    echo "Test 2: Verifying autobahn._flatc.get_flatc_path() works..."
    # `|| true` so a failing import/lookup yields an empty path, which the
    # -x check below then reports as FAIL instead of aborting under set -e.
    FLATC_PATH=$(${VENV_PYTHON} -c "from autobahn._flatc import get_flatc_path; print(get_flatc_path())" || true)
    if [ -x "${FLATC_PATH}" ]; then
        echo "  PASS: get_flatc_path() returns executable path"
        echo "  Path: ${FLATC_PATH}"
    else
        echo "  FAIL: get_flatc_path() returned non-executable: ${FLATC_PATH}"
        exit 1
    fi
    echo ""

    # Test 3: Python API run_flatc() works
    echo "Test 3: Verifying autobahn._flatc.run_flatc() works..."
    if ${VENV_PYTHON} -c "from autobahn._flatc import run_flatc; exit(run_flatc(['--version']))"; then
        echo "  PASS: run_flatc(['--version']) works"
    else
        echo "  FAIL: run_flatc() failed"
        exit 1
    fi
    echo ""

    # Test 4: reflection.fbs is accessible
    echo "Test 4: Verifying reflection.fbs is accessible at runtime..."
    if FBS_PATH=$(${VENV_PYTHON} -c 'import autobahn.flatbuffers; from pathlib import Path; p = Path(autobahn.flatbuffers.__file__).parent / "reflection.fbs"; print(p) if p.exists() else exit(1)'); then
        # stat -c is GNU coreutils, stat -f is the BSD/macOS fallback
        FBS_SIZE=$(stat -c%s "${FBS_PATH}" 2>/dev/null || stat -f%z "${FBS_PATH}")
        echo "  PASS: reflection.fbs found at ${FBS_PATH}"
        echo "  Size: ${FBS_SIZE} bytes"
    else
        echo "  FAIL: reflection.fbs not found"
        exit 1
    fi
    echo ""

    # Test 5: reflection.bfbs is accessible
    echo "Test 5: Verifying reflection.bfbs is accessible at runtime..."
    if BFBS_PATH=$(${VENV_PYTHON} -c 'import autobahn.flatbuffers; from pathlib import Path; p = Path(autobahn.flatbuffers.__file__).parent / "reflection.bfbs"; print(p) if p.exists() else exit(1)'); then
        BFBS_SIZE=$(stat -c%s "${BFBS_PATH}" 2>/dev/null || stat -f%z "${BFBS_PATH}")
        echo "  PASS: reflection.bfbs found at ${BFBS_PATH}"
        echo "  Size: ${BFBS_SIZE} bytes"
    else
        echo "  FAIL: reflection.bfbs not found"
        exit 1
    fi
    echo ""

    echo "========================================================================"
    echo "ALL BUNDLED FLATC TESTS PASSED"
    echo "========================================================================"

# Test installing and verifying a built wheel (used in CI for artifact verification)
# Usage: just test-wheel-install /path/to/autobahn-*.whl
test-wheel-install wheel_path:
    #!/usr/bin/env bash
    set -e
    WHEEL_PATH="{{ wheel_path }}"

    if [ ! -f "${WHEEL_PATH}" ]; then
        echo "ERROR: Wheel file not found: ${WHEEL_PATH}"
        exit 1
    fi

    WHEEL_NAME=$(basename "${WHEEL_PATH}")
    echo "========================================================================"
    echo "  WHEEL INSTALL TEST"
    echo "========================================================================"
    echo ""
    echo "Wheel: ${WHEEL_NAME}"
    echo ""

    # Create ephemeral venv name based on wheel
    EPHEMERAL_VENV="smoke-wheel-$$"
    EPHEMERAL_PATH="{{ VENV_DIR }}/${EPHEMERAL_VENV}"
    # BUGFIX: remove the ephemeral venv on ANY exit. With `set -e`, the
    # explicit cleanup at the bottom was skipped whenever a step failed,
    # leaking the venv directory.
    trap 'rm -rf "${EPHEMERAL_PATH}"' EXIT

    # Extract Python version from wheel filename
    # Wheel format: {name}-{version}-{python tag}-{abi tag}-{platform tag}.whl
    # Python tag examples: cp312, cp311, pp311, py3
    PYTAG=$(echo "${WHEEL_NAME}" | sed -n 's/.*-\(cp[0-9]*\|pp[0-9]*\|py[0-9]*\)-.*/\1/p')

    if [[ "${PYTAG}" =~ ^cp([0-9])([0-9]+)$ ]]; then
        # CPython wheel (e.g., cp312 -> 3.12)
        MAJOR="${BASH_REMATCH[1]}"
        MINOR="${BASH_REMATCH[2]}"
        PYTHON_SPEC="cpython-${MAJOR}.${MINOR}"
        echo "Detected CPython ${MAJOR}.${MINOR} wheel"
    elif [[ "${PYTAG}" =~ ^pp([0-9])([0-9]+)$ ]]; then
        # PyPy wheel (e.g., pp311 -> pypy-3.11)
        MAJOR="${BASH_REMATCH[1]}"
        MINOR="${BASH_REMATCH[2]}"
        PYTHON_SPEC="pypy-${MAJOR}.${MINOR}"
        echo "Detected PyPy ${MAJOR}.${MINOR} wheel"
    elif [[ "${PYTAG}" =~ ^py([0-9])$ ]]; then
        # Pure Python wheel (e.g., py3) - use system Python
        SYSTEM_VERSION=$(python3 -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")
        PYTHON_SPEC="cpython-${SYSTEM_VERSION}"
        echo "Pure Python wheel, using system Python ${SYSTEM_VERSION}"
    else
        # Fallback to system Python
        SYSTEM_VERSION=$(python3 -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")
        PYTHON_SPEC="cpython-${SYSTEM_VERSION}"
        echo "Could not detect Python version from wheel, using system Python ${SYSTEM_VERSION}"
    fi

    echo "Creating ephemeral venv with ${PYTHON_SPEC}..."

    mkdir -p "{{ VENV_DIR }}"
    uv venv --seed --python "${PYTHON_SPEC}" "${EPHEMERAL_PATH}"

    EPHEMERAL_PYTHON="${EPHEMERAL_PATH}/bin/python3"

    # Install the wheel
    echo ""
    echo "Installing wheel..."
    ${EPHEMERAL_PYTHON} -m pip install "${WHEEL_PATH}"

    # Run smoke tests
    echo ""
    VENV_DIR="{{ VENV_DIR }}" just test-smoke "${EPHEMERAL_VENV}"

    # Cleanup (the EXIT trap above also covers failure paths)
    echo ""
    echo "Cleaning up ephemeral venv..."
    rm -rf "${EPHEMERAL_PATH}"

    echo ""
    echo "========================================================================"
    echo "WHEEL INSTALL TEST PASSED: ${WHEEL_NAME}"
    echo "========================================================================"

# Test installing and verifying a source distribution (used in CI for artifact verification)
# Usage: just test-sdist-install /path/to/autobahn-*.tar.gz
test-sdist-install sdist_path:
    #!/usr/bin/env bash
    set -e
    SDIST_PATH="{{ sdist_path }}"

    if [ ! -f "${SDIST_PATH}" ]; then
        echo "ERROR: Source distribution not found: ${SDIST_PATH}"
        exit 1
    fi

    SDIST_NAME=$(basename "${SDIST_PATH}")
    echo "========================================================================"
    echo "  SOURCE DISTRIBUTION INSTALL TEST"
    echo "========================================================================"
    echo ""
    echo "Source dist: ${SDIST_NAME}"
    echo ""

    # Check if cmake is available (required for flatc build)
    if command -v cmake >/dev/null 2>&1; then
        echo "cmake: $(cmake --version | head -1)"
    else
        echo "WARNING: cmake not found - flatc binary will not be built"
        echo "         Install cmake for full functionality"
    fi
    echo ""

    # Create ephemeral venv name
    EPHEMERAL_VENV="smoke-sdist-$$"
    EPHEMERAL_PATH="{{ VENV_DIR }}/${EPHEMERAL_VENV}"
    # BUGFIX: remove the ephemeral venv on ANY exit. With `set -e`, the
    # explicit cleanup at the bottom was skipped whenever a step failed,
    # leaking the venv directory.
    trap 'rm -rf "${EPHEMERAL_PATH}"' EXIT

    echo "Creating ephemeral venv: ${EPHEMERAL_VENV}..."

    # Detect system Python version and create venv
    # (removed unused ENV_NAME derivation that was never referenced)
    SYSTEM_VERSION=$(python3 -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")
    PYTHON_SPEC="cpython-${SYSTEM_VERSION}"

    mkdir -p "{{ VENV_DIR }}"
    uv venv --seed --python "${PYTHON_SPEC}" "${EPHEMERAL_PATH}"

    EPHEMERAL_PYTHON="${EPHEMERAL_PATH}/bin/python3"

    # Install build dependencies (required for --no-build-isolation)
    echo ""
    echo "Installing build dependencies..."
    ${EPHEMERAL_PYTHON} -m pip install --no-cache-dir setuptools wheel hatchling

    # Install from source distribution
    # Use --no-build-isolation to allow access to system cmake for building flatc
    # Use --no-cache-dir to disable HTTP download cache
    # Use --no-binary autobahn to force building from source (disable wheel cache)
    echo ""
    echo "Installing from source distribution..."
    ${EPHEMERAL_PYTHON} -m pip install --no-build-isolation --no-cache-dir --no-binary autobahn "${SDIST_PATH}"

    # Run smoke tests
    echo ""
    VENV_DIR="{{ VENV_DIR }}" just test-smoke "${EPHEMERAL_VENV}"

    # Cleanup (the EXIT trap above also covers failure paths)
    echo ""
    echo "Cleaning up ephemeral venv..."
    rm -rf "${EPHEMERAL_PATH}"

    echo ""
    echo "========================================================================"
    echo "SOURCE DISTRIBUTION INSTALL TEST PASSED: ${SDIST_NAME}"
    echo "========================================================================"

# -----------------------------------------------------------------------------
# -- Documentation
# -----------------------------------------------------------------------------

# Install documentation dependencies
install-docs venv="": (create venv)
    #!/usr/bin/env bash
    set -e
    # Pick the venv to install into; fall back to the system-Python default.
    TARGET_VENV="{{ venv }}"
    if [[ -z "${TARGET_VENV}" ]]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        TARGET_VENV=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${TARGET_VENV}'"
    fi
    PY=$(just --quiet _get-venv-python "${TARGET_VENV}")
    echo "==> Installing documentation tools in ${TARGET_VENV}..."
    # Editable install with the 'docs' extra.
    ${PY} -m pip install -e .[docs]

# Build optimized SVGs from docs/_graphics/*.svg using scour and generate favicon
optimize-images venv="": (install-docs venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        VENV_NAME=$(just --quiet _get-system-venv-name)
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    # Bootstrap tooling if the venv does not exist yet.
    if [ ! -d "${VENV_PATH}" ]; then
        just install-tools ${VENV_NAME}
    fi

    SOURCEDIR="{{ PROJECT_DIR }}/docs/_graphics"
    TARGETDIR="{{ PROJECT_DIR }}/docs/_static/img"
    FAVICONDIR="{{ PROJECT_DIR }}/docs/_static"

    echo "==> Building optimized SVG images..."
    mkdir -p "${TARGETDIR}"

    if [ -d "${SOURCEDIR}" ]; then
        find "${SOURCEDIR}" -name "*.svg" -type f | while read -r source_file; do
            filename=$(basename "${source_file}")
            # BUGFIX: these two lines previously contained the literal command
            # substitution "$(unknown)" (a broken placeholder), which would run
            # a nonexistent `unknown` command and produce an empty target path.
            target_file="${TARGETDIR}/${filename}"
            echo "  Processing: ${filename}"
            "${VENV_PATH}/bin/scour" \
                --remove-descriptive-elements \
                --enable-comment-stripping \
                --enable-viewboxing \
                --indent=none \
                --no-line-breaks \
                --shorten-ids \
                "${source_file}" "${target_file}"
        done
    fi

    # Generate favicon from logo SVG using ImageMagick
    LOGO_SVG="${TARGETDIR}/autobahn_logo_blue.svg"
    FAVICON="${FAVICONDIR}/favicon.ico"
    if [ -f "${LOGO_SVG}" ]; then
        echo "==> Generating favicon from logo..."
        if command -v convert &> /dev/null; then
            convert -background none -density 256 "${LOGO_SVG}" \
                -resize 48x48 -gravity center -extent 48x48 \
                -define icon:auto-resize=48,32,16 \
                "${FAVICON}"
            echo "  Created: favicon.ico"
        else
            echo "  Warning: ImageMagick 'convert' not found, skipping favicon generation"
        fi
    fi

# Build the HTML documentation using Sphinx
docs venv="": (optimize-images venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    # Bootstrap tooling if the venv does not exist yet.
    if [ ! -d "${VENV_PATH}" ]; then
        just install-tools ${VENV_NAME}
    fi
    echo "==> Building documentation..."
    # (removed an unused VENV_PYTHON lookup — sphinx-build is invoked
    # directly from the venv's bin directory)
    "${VENV_PATH}/bin/sphinx-build" -b html docs/ docs/_build/html

# NOTE(review): `open` is the macOS opener — on Linux this needs `xdg-open`
# instead; confirm which platforms this recipe is expected to run on.
# Build documentation and open in system viewer
docs-view venv="": (docs venv)
    echo "==> Opening documentation in viewer ..."
    open docs/_build/html/index.html

# Removes only the generated output tree (docs/_build); doc sources untouched.
# Clean the generated documentation
docs-clean:
    echo "==> Cleaning documentation build artifacts..."
    rm -rf docs/_build

# Run spelling check on documentation
docs-spelling venv="": (install-docs venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    TMPBUILDDIR="./.build"
    mkdir -p "${TMPBUILDDIR}"
    echo "==> Running spell check on documentation..."
    # The "spelling" builder (presumably from sphinxcontrib-spelling — confirm
    # in the docs extras) writes to a scratch dir so the regular docs build in
    # docs/_build is not touched.
    "${VENV_PATH}/bin/sphinx-build" -b spelling -d "${TMPBUILDDIR}/docs/doctrees" docs "${TMPBUILDDIR}/docs/spelling"

# -----------------------------------------------------------------------------
# -- Building and Publishing
# -----------------------------------------------------------------------------

# Bump flatbuffers git submodule to latest release tag
# After running this, run `just update-flatbuffers` to copy and apply tweaks
bump-flatbuffers:
    #!/usr/bin/env bash
    set -e
    # BUGFIX: use `git -C deps/flatbuffers` instead of `cd`. This is a single
    # bash script (shebang recipe), so the previous bare `cd deps/flatbuffers`
    # persisted, and the later `$(cd deps/flatbuffers && ...)` subshells then
    # tried to enter deps/flatbuffers *again* from inside the submodule,
    # failed, and aborted the recipe under `set -e`.
    echo "==> Fetching latest tags from upstream..."
    git -C deps/flatbuffers fetch --tags
    # Most recent tagged commit (rev-list --tags --max-count=1), turned into
    # its tag name via describe.
    LATEST_TAG=$(git -C deps/flatbuffers describe --tags --abbrev=0 $(git -C deps/flatbuffers rev-list --tags --max-count=1))
    echo "==> Latest release tag: ${LATEST_TAG}"
    git -C deps/flatbuffers checkout "${LATEST_TAG}"
    echo "==> Submodule now at: $(git -C deps/flatbuffers describe --tags --always)"
    echo ""
    echo "Next steps:"
    echo "  1. just update-flatbuffers"
    echo "  2. git add deps/flatbuffers src/autobahn/flatbuffers"
    echo "  3. git commit -m 'Bump vendored flatbuffers to ${LATEST_TAG}'"

# NOTE(review): implementation lives in scripts/update_flatbuffers.sh —
# presumably copies the submodule's Python runtime into
# src/autobahn/flatbuffers and applies local tweaks; see the script itself.
# Update vendored flatbuffers Python runtime from git submodule
update-flatbuffers:
    ./scripts/update_flatbuffers.sh

# Build wheel only (usage: `just build cpy314`)
build venv="": (install-build-tools venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Building wheel package..."

    # Build the wheel with NVX acceleration
    AUTOBAHN_USE_NVX=1 ${VENV_PYTHON} -m build --wheel

    # Convert linux wheels to manylinux format using auditwheel.
    # The `[ -f "$wheel" ]` guard also covers the case where the glob
    # matches nothing (the literal pattern is skipped).
    if [ -x "${VENV_PATH}/bin/auditwheel" ]; then
        for wheel in dist/*-linux_*.whl; do
            if [ -f "$wheel" ]; then
                echo "==> Converting $(basename $wheel) to manylinux format..."
                "${VENV_PATH}/bin/auditwheel" show "$wheel"
                "${VENV_PATH}/bin/auditwheel" repair "$wheel" -w dist/
                # Remove the original linux wheel after successful repair
                rm "$wheel"
            fi
        done
    else
        echo "WARNING: auditwheel not available, skipping manylinux conversion"
    fi

    ls -la dist/

# Build source distribution only (no wheels, no NVX flag needed)
build-sourcedist venv="": (install-build-tools venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv (explicit argument, else system-Python default).
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    # (removed unused VENV_PATH — only the venv's python is needed here)
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Building source distribution..."
    ${VENV_PYTHON} -m build --sdist
    ls -la dist/

# Meta-recipe to run `build` on all environments
build-all:
    #!/usr/bin/env bash
    # Fail fast: without `set -e` a failed build for one venv was silently
    # ignored and the loop kept going, reporting apparent success.
    set -e
    for venv in {{ENVS}}; do
        just build "${venv}"
    done
    ls -la dist/

# Clean build artifacts
clean-build:
    @echo "==> Cleaning build artifacts..."
    rm -rf build/ dist/ *.egg-info/
    @echo "==> Build artifacts cleaned."

# Verify wheels using twine check and auditwheel (for native extensions)
verify-wheels venv="": (install-build-tools venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"

    echo "==> Verifying wheels with twine check..."
    "${VENV_PATH}/bin/twine" check dist/*

    echo ""
    echo "==> Verifying wheels with auditwheel (native extension validation)..."
    # Note: auditwheel is for Linux wheels with native extensions.
    # Autobahn has optional NVX native extensions via CFFI.
    for wheel in dist/*.whl; do
        # Pure-Python wheels (tagged "none-any") carry no native code to audit.
        if [[ "$wheel" == *"none-any"* ]]; then
            echo "    Skipping pure Python wheel: $wheel"
        else
            echo "    Checking: $wheel"
            # `|| true`: auditwheel output is informational here; a
            # non-conforming wheel must not fail the whole recipe.
            "${VENV_PATH}/bin/auditwheel" show "$wheel" || true
        fi
    done
    echo "==> Wheel verification complete."

# Requires: gh (authenticated), git tag of the form vX.Y.Z, twine in the venv.
# Download release artifacts from GitHub and publish to PyPI
publish-pypi venv="" tag="":
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"

    # Determine which tag to use (default: most recent git tag)
    TAG="{{ tag }}"
    if [ -z "${TAG}" ]; then
        echo "==> No tag specified. Using latest git tag..."
        TAG=$(git describe --tags --abbrev=0)
        echo "==> Using tag: ${TAG}"
    fi

    # Verify tag looks like a version tag
    if [[ ! "${TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        echo "❌ Error: Tag '${TAG}' doesn't look like a version tag (expected format: vX.Y.Z)"
        exit 1
    fi

    # Create temp directory for downloads. The EXIT trap guarantees cleanup
    # on every exit path (success, `set -e` aborts, explicit `exit 1`);
    # previously the temp dir leaked whenever download or upload failed.
    TEMP_DIR=$(mktemp -d)
    trap 'rm -rf "${TEMP_DIR}"' EXIT
    echo "==> Downloading release artifacts from GitHub release ${TAG}..."
    echo "    Temp directory: ${TEMP_DIR}"

    # Download all release assets
    gh release download "${TAG}" --repo crossbario/autobahn-python --dir "${TEMP_DIR}"

    echo ""
    echo "==> Downloaded files:"
    ls -lh "${TEMP_DIR}"
    echo ""

    # Count wheels and source distributions
    WHEEL_COUNT=$(find "${TEMP_DIR}" -name "*.whl" | wc -l)
    SDIST_COUNT=$(find "${TEMP_DIR}" -name "*.tar.gz" | wc -l)

    echo "Found ${WHEEL_COUNT} wheel(s) and ${SDIST_COUNT} source distribution(s)"

    # Refuse to publish a partial release (cleanup handled by the trap)
    if [ "${WHEEL_COUNT}" -eq 0 ] || [ "${SDIST_COUNT}" -eq 0 ]; then
        echo "❌ Error: Expected at least 1 wheel and 1 source distribution"
        echo "    Wheels found: ${WHEEL_COUNT}"
        echo "    Source dist found: ${SDIST_COUNT}"
        exit 1
    fi

    # Ensure twine is installed
    if [ ! -f "${VENV_PATH}/bin/twine" ]; then
        echo "==> Installing twine in ${VENV_NAME}..."
        "${VENV_PATH}/bin/pip" install twine
    fi

    echo "==> Publishing to PyPI using twine..."
    "${VENV_PATH}/bin/twine" upload "${TEMP_DIR}"/*.whl "${TEMP_DIR}"/*.tar.gz

    echo "✅ Successfully published ${TAG} to PyPI"

# Requires RTD_TOKEN in the environment; triggers a build via the RTD v3 API.
# Trigger Read the Docs build for a specific tag
publish-rtd tag="":
    #!/usr/bin/env bash
    set -e

    # Determine which tag to use (default: most recent git tag)
    TAG="{{ tag }}"
    if [ -z "${TAG}" ]; then
        echo "==> No tag specified. Using latest git tag..."
        TAG=$(git describe --tags --abbrev=0)
        echo "==> Using tag: ${TAG}"
    fi

    # Verify tag looks like a version tag
    if [[ ! "${TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        echo "❌ Error: Tag '${TAG}' doesn't look like a version tag (expected format: vX.Y.Z)"
        exit 1
    fi

    # Check if RTD_TOKEN is set
    if [ -z "${RTD_TOKEN}" ]; then
        echo "❌ Error: RTD_TOKEN environment variable is not set"
        echo ""
        echo "To trigger RTD builds, you need to:"
        echo "1. Get an API token from https://readthedocs.org/accounts/tokens/"
        echo "2. Export it: export RTD_TOKEN=your_token_here"
        echo ""
        exit 1
    fi

    echo "==> Triggering Read the Docs build for ${TAG}..."
    echo ""

    # Trigger build via RTD API
    # See: https://docs.readthedocs.io/en/stable/api/v3.html#post--api-v3-projects-(string-project_slug)-versions-(string-version_slug)-builds-
    RTD_PROJECT="autobahn"
    RTD_API_URL="https://readthedocs.org/api/v3/projects/${RTD_PROJECT}/versions/${TAG}/builds/"

    echo "==> Calling RTD API..."
    echo "    Project: ${RTD_PROJECT}"
    echo "    Version: ${TAG}"
    echo "    URL: ${RTD_API_URL}"
    echo ""

    # Use a private mktemp file for the API response instead of the fixed
    # world-writable path /tmp/rtd_response.json (predictable-name race),
    # and clean it up on every exit path — the old code leaked the file
    # when the recipe exited with an error.
    RESPONSE_FILE=$(mktemp)
    trap 'rm -f "${RESPONSE_FILE}"' EXIT

    # Trigger the build
    HTTP_CODE=$(curl -X POST \
        -H "Authorization: Token ${RTD_TOKEN}" \
        -w "%{http_code}" \
        -s -o "${RESPONSE_FILE}" \
        "${RTD_API_URL}")

    echo "==> API Response (HTTP ${HTTP_CODE}):"
    # Pretty-print if the response is valid JSON, else dump it raw
    python3 -m json.tool "${RESPONSE_FILE}" 2>/dev/null || cat "${RESPONSE_FILE}"
    echo ""

    if [ "${HTTP_CODE}" = "202" ] || [ "${HTTP_CODE}" = "201" ]; then
        echo "✅ Read the Docs build triggered successfully!"
        echo ""
        echo "Check build status at:"
        echo "  https://readthedocs.org/projects/${RTD_PROJECT}/builds/"
        echo ""
        echo "Documentation will be available at:"
        echo "  https://${RTD_PROJECT}.readthedocs.io/en/${TAG}/"
        echo "  https://${RTD_PROJECT}.readthedocs.io/en/stable/ (if marked as stable)"
        echo ""
    else
        echo "❌ Error: Failed to trigger RTD build (HTTP ${HTTP_CODE})"
        echo ""
        echo "Common issues:"
        echo "- Invalid RTD_TOKEN"
        echo "- Version/tag doesn't exist in RTD project"
        echo "- Network/API connectivity problems"
        echo ""
        exit 1
    fi

# Publish to both PyPI and Read the Docs (meta-recipe)
publish venv="" tag="": (publish-pypi venv tag) (publish-rtd tag)
    #!/usr/bin/env bash
    set -e
    # Re-resolve the tag the dependency recipes used (default: latest git tag)
    TAG="{{ tag }}"
    if [ -z "${TAG}" ]; then
        TAG=$(git describe --tags --abbrev=0)
    fi
    echo ""
    echo "════════════════════════════════════════════════════════════"
    echo "✅ Successfully published version ${TAG}"
    echo "════════════════════════════════════════════════════════════"
    echo ""
    echo "📦 PyPI: https://pypi.org/project/autobahn/${TAG#v}/"
    # RTD project slug is "autobahn" (see publish-rtd), so the docs URL is
    # autobahn.readthedocs.io — the previous "autobahnpython" host did not
    # match the URLs printed by publish-rtd.
    echo "📚 RTD:  https://autobahn.readthedocs.io/en/${TAG}/"
    echo ""

# -----------------------------------------------------------------------------
# -- FlatBuffers Schema Generation
# -----------------------------------------------------------------------------

# Install FlatBuffers compiler (flatc) to /usr/local/bin (SYSTEM-WIDE)
#
# WARNING: You probably DON'T need this!
#
# autobahn-python bundles flatc in binary wheels and source distributions.
# After installing autobahn, you can use the bundled flatc via:
#
#   flatc --version              # If installed via pip/wheel
#   python -m autobahn._flatc    # Alternative invocation
#
# This recipe installs a SEPARATE system-wide flatc binary to /usr/local/bin.
# Only use this if you specifically need a system flatc that is independent
# of your Python environment.
#
install-flatc-system:
    #!/usr/bin/env bash
    set -e

    echo "======================================================================"
    echo "WARNING: Installing SYSTEM-WIDE flatc to /usr/local/bin"
    echo "======================================================================"
    echo ""
    echo "You probably DON'T need this!"
    echo ""
    echo "autobahn-python bundles flatc in binary wheels. After 'pip install autobahn':"
    echo "  - Run 'flatc --version' to use the bundled compiler"
    echo "  - The bundled flatc version matches the vendored FlatBuffers runtime"
    echo ""
    echo "This recipe installs a SEPARATE system flatc that may have a different"
    echo "version than the bundled one, potentially causing compatibility issues."
    echo ""
    # Interactive confirmation gate — abort (exit 0) on anything but y/Y
    read -p "DO YOU REALLY WANT TO INSTALL SYSTEM-WIDE FLATC? [y/N] " -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        echo "Aborted."
        exit 0
    fi

    FLATC_VERSION="25.9.23"
    FLATC_URL="https://github.com/google/flatbuffers/releases/download/v${FLATC_VERSION}/Linux.flatc.binary.g++-13.zip"
    # The EXIT trap cleans the temp dir on every exit path — previously a
    # failed curl/unzip under `set -e` leaked the directory.
    TEMP_DIR=$(mktemp -d)
    trap 'rm -rf "${TEMP_DIR}"' EXIT

    echo ""
    echo "==> Installing FlatBuffers compiler v${FLATC_VERSION}..."
    echo "    URL: ${FLATC_URL}"
    echo "    Temp dir: ${TEMP_DIR}"

    # Download and extract
    cd "${TEMP_DIR}"
    curl -L -o flatc.zip "${FLATC_URL}"
    unzip flatc.zip

    # Install to /usr/local/bin (requires sudo)
    echo "==> Installing flatc to /usr/local/bin (requires sudo)..."
    sudo mv flatc /usr/local/bin/flatc
    sudo chmod +x /usr/local/bin/flatc

    # Verify installation
    echo "==> Verification:"
    /usr/local/bin/flatc --version
    echo ""
    echo "✅ System-wide FlatBuffers compiler v${FLATC_VERSION} installed to /usr/local/bin/flatc"
    echo ""
    echo "NOTE: The bundled flatc in autobahn wheels may have a different path priority."
    echo "      Use '/usr/local/bin/flatc' explicitly if you need the system version."

# Clean generated FlatBuffers files
clean-fbs:
    @echo "==> Cleaning FlatBuffers generated files..."
    rm -rf ./src/autobahn/wamp/gen/

# Requires flatc on PATH (see install-flatc-system, or the bundled one).
# Build FlatBuffers schema files and Python bindings
build-fbs venv="": (install-tools venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"

    FBSFILES="./src/autobahn/wamp/flatbuffers/*.fbs"
    FLATC="flatc"
    echo "==> Generating FlatBuffers binary schema and Python wrappers using $(${FLATC} --version)..."

    # Generate schema binary type library (*.bfbs files)
    ${FLATC} -o ./src/autobahn/wamp/gen/schema/ --binary --schema --bfbs-comments --bfbs-builtins ${FBSFILES}
    echo "--> Generated $(find ./src/autobahn/wamp/gen/schema/ -name '*.bfbs' | wc -l) .bfbs files"

    # Generate schema Python bindings (*.py files)
    ${FLATC} -o ./src/autobahn/wamp/gen/ --python ${FBSFILES}
    touch ./src/autobahn/wamp/gen/__init__.py
    echo "--> Generated $(find ./src/autobahn/wamp/gen/ -name '*.py' | wc -l) .py files"

    # Fix import paths in generated files (flatc generates relative imports)
    # Change: from wamp.proto.X import X
    # To:     from autobahn.wamp.gen.wamp.proto.X import X
    find ./src/autobahn/wamp/gen/wamp/proto/ -name "*.py" -exec sed -i 's/from wamp\.proto\./from autobahn.wamp.gen.wamp.proto./g' {} +
    echo "--> Fixed import paths in generated files"

    # Fix flatbuffers imports to use vendored version under autobahn namespace
    # Change: import flatbuffers -> from autobahn import flatbuffers
    # Change: from flatbuffers.X import Y -> from autobahn.flatbuffers.X import Y
    # NOTE: anchored (^...$) sed on the bare import; the second sed rewrites
    # submodule imports. Order of these two rewrites does not overlap.
    find ./src/autobahn/wamp/gen/ -name "*.py" -exec sed -i 's/^import flatbuffers$/from autobahn import flatbuffers/' {} +
    find ./src/autobahn/wamp/gen/ -name "*.py" -exec sed -i 's/from flatbuffers\./from autobahn.flatbuffers./g' {} +
    echo "--> Fixed flatbuffers imports to use autobahn.flatbuffers"

    echo "Auto-formatting code using ruff after flatc code generation .."
    "${VENV_PATH}/bin/ruff" format ./src/autobahn/wamp/gen/
    "${VENV_PATH}/bin/ruff" check --fix ./src/autobahn/wamp/gen/

# -----------------------------------------------------------------------------
# -- File Management Utilities
# -----------------------------------------------------------------------------

# Rename audit files to replace ':' with '_' for Windows compatibility
fix-audit-filenames:
    #!/usr/bin/env bash
    set -e

    echo "==> Renaming audit files to replace ':' with '_' for Windows compatibility..."

    # Check if .audit directory exists
    if [ ! -d ".audit" ]; then
        echo "No .audit directory found, nothing to rename."
        exit 0
    fi

    # Count files that need renaming
    FILES_TO_RENAME=$(find .audit -name "*:*" -type f | wc -l)

    if [ "$FILES_TO_RENAME" -eq 0 ]; then
        echo "No files with ':' characters found in .audit directory."
        exit 0
    fi

    echo "Found $FILES_TO_RENAME files to rename:"
    find .audit -name "*:*" -type f
    echo ""

    # Rename files (read -r + quoting keeps names with spaces intact)
    find .audit -name "*:*" -type f | while read -r file; do
        # Get directory and filename
        dir=$(dirname "$file")
        filename=$(basename "$file")

        # Replace : with _
        new_filename="${filename//:/_}"
        new_file="$dir/$new_filename"

        # Don't silently clobber a file that already has the sanitized name
        if [ -e "$new_file" ]; then
            echo "WARNING: target already exists, skipping: $new_filename"
            continue
        fi

        echo "Renaming: $filename -> $new_filename"
        mv "$file" "$new_file"
    done

    echo ""
    echo "==> Renaming complete! Updated files:"
    ls -la .audit/
    echo ""
    echo "These files are now Windows-compatible."

# -----------------------------------------------------------------------------
# -- WAMP Message Serialization Benchmarks
# -----------------------------------------------------------------------------

# Run a single WAMP serialization benchmark (usage: `just benchmark-serialization-run cpy311 cbor normal small 10`)
benchmark-serialization-run venv="" serializer="cbor" payload_mode="normal" payload_size="small" iterations="10": (install-benchmark venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Capture recipe parameters into shell variables
    SERIALIZER="{{ serializer }}"
    PAYLOAD_MODE="{{ payload_mode }}"
    PAYLOAD_SIZE="{{ payload_size }}"
    ITERATIONS="{{ iterations }}"

    # Ensure build directory exists
    mkdir -p examples/benchmarks/serialization/build

    echo "==> Running WAMP serialization benchmark in ${VENV_NAME}..."
    echo "    Serializer: ${SERIALIZER}"
    echo "    Payload mode: ${PAYLOAD_MODE}"
    echo "    Payload size: ${PAYLOAD_SIZE}"
    echo "    Iterations: ${ITERATIONS}"
    echo ""

    BENCHMARK_DIR="{{ PROJECT_DIR }}/examples/benchmarks/serialization"
    cd "${BENCHMARK_DIR}"

    # Convert relative venv path to absolute if needed (we just cd'd away
    # from the project dir, so a relative interpreter path would break)
    if [[ "${VENV_PYTHON}" != /* ]]; then
        VENV_PYTHON="{{ PROJECT_DIR }}/${VENV_PYTHON}"
    fi

    # Run one benchmark; results + vmprof profile land in build/
    ${VENV_PYTHON} main.py run \
        --serializer "${SERIALIZER}" \
        --payload_mode "${PAYLOAD_MODE}" \
        --payload_size "${PAYLOAD_SIZE}" \
        --iterations "${ITERATIONS}" \
        --profile "build/profile_${SERIALIZER}_${PAYLOAD_MODE}_${PAYLOAD_SIZE}.dat" \
        --results build

# Run full WAMP serialization benchmark suite across all serializers and payload configurations (usage: `just benchmark-serialization-suite cpy311`)
benchmark-serialization-suite venv="" iterations="10": (install-benchmark venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    ITERATIONS="{{ iterations }}"

    echo "==> Running full WAMP serialization benchmark suite in ${VENV_NAME}..."
    echo "    Iterations per benchmark: ${ITERATIONS}"
    echo ""

    # Full cartesian product: serializer x payload mode x payload size.
    # `|| true` is deliberate best-effort: some serializers/variants may be
    # unavailable in a given venv and should not abort the whole suite.
    for serializer in json msgpack cbor ubjson flatbuffers; do
        for mode in normal transparent; do
            for size in empty small medium large xl xxl; do
                echo ""
                echo "==> Running: ${serializer}, ${mode}, ${size}"
                just benchmark-serialization-run "${VENV_NAME}" "${serializer}" "${mode}" "${size}" "${ITERATIONS}" || true
            done
        done
    done

    # Test ujson variant (if available)
    echo ""
    echo "==> Testing ujson variant..."
    AUTOBAHN_USE_UJSON=1 just benchmark-serialization-run "${VENV_NAME}" json normal small "${ITERATIONS}" || true
    AUTOBAHN_USE_UJSON=1 just benchmark-serialization-run "${VENV_NAME}" json transparent small "${ITERATIONS}" || true

    # Test cbor2 variant (if available)
    echo ""
    echo "==> Testing cbor2 variant..."
    AUTOBAHN_USE_CBOR2=1 just benchmark-serialization-run "${VENV_NAME}" cbor normal small "${ITERATIONS}" || true
    AUTOBAHN_USE_CBOR2=1 just benchmark-serialization-run "${VENV_NAME}" cbor transparent small "${ITERATIONS}" || true

    echo ""
    echo "==> Full benchmark suite completed!"
    echo "    Results: examples/benchmarks/serialization/build/*.json"
    echo "    Profiles: examples/benchmarks/serialization/build/*.dat"

# Generate HTML report from WAMP serialization benchmark results (usage: `just benchmark-serialization-report cpy311`)
benchmark-serialization-report venv="": (install-benchmark venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Require prior benchmark runs — the report is built from build/ results
    if [ ! -d "examples/benchmarks/serialization/build" ]; then
        echo "❌ ERROR: No benchmark results found in examples/benchmarks/serialization/build/"
        echo ""
        echo "Please run benchmarks first using:"
        echo "  just benchmark-serialization-run"
        echo "  or"
        echo "  just benchmark-serialization-suite"
        echo ""
        exit 1
    fi

    echo "==> Generating HTML report from benchmark results..."

    BENCHMARK_DIR="{{ PROJECT_DIR }}/examples/benchmarks/serialization"
    cd "${BENCHMARK_DIR}"

    # Convert relative venv path to absolute if needed (we just cd'd away
    # from the project dir, so a relative interpreter path would break)
    if [[ "${VENV_PYTHON}" != /* ]]; then
        VENV_PYTHON="{{ PROJECT_DIR }}/${VENV_PYTHON}"
    fi

    ${VENV_PYTHON} main.py index --output build

    echo ""
    echo "✅ HTML report generated!"
    echo "    Report: examples/benchmarks/serialization/build/index.html"
    echo ""
    echo "To view the report:"
    echo "  python -m http.server 8000 -d examples/benchmarks/serialization/build"
    echo "  then visit http://localhost:8000"
# Generate flamegraph SVGs from vmprof profile data (usage: `just benchmark-serialization-flamegraphs cpy311`)
benchmark-serialization-flamegraphs venv="": (install-benchmark venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")

    # Require prior benchmark runs — flamegraphs are built from *.dat profiles
    if [ ! -d "examples/benchmarks/serialization/build" ]; then
        echo "❌ ERROR: No benchmark results found in examples/benchmarks/serialization/build/"
        echo ""
        echo "Please run benchmarks first using:"
        echo "  just benchmark-serialization-run"
        echo "  or"
        echo "  just benchmark-serialization-suite"
        echo ""
        exit 1
    fi

    # Convert relative venv path to absolute if needed
    if [[ "${VENV_PYTHON}" != /* ]]; then
        VENV_PYTHON="{{ PROJECT_DIR }}/${VENV_PYTHON}"
    fi

    BENCHMARK_DIR="{{ PROJECT_DIR }}/examples/benchmarks/serialization"

    # Ensure logo is in build directory
    if [ ! -f "${BENCHMARK_DIR}/build/crossbarfx_black.svg" ]; then
        echo "==> Copying logo to build directory..."
        cp "${BENCHMARK_DIR}/crossbarfx_black.svg" "${BENCHMARK_DIR}/build/"
    fi

    echo "==> Generating flamegraph SVGs from vmprof profiles..."
    # Quote the script path: an unquoted ${BENCHMARK_DIR} would word-split
    # if the project path contains spaces.
    "${BENCHMARK_DIR}/generate_flamegraphs.sh" "${BENCHMARK_DIR}/build" "${VENV_PYTHON}"

# Clean WAMP serialization benchmark artifacts (usage: `just benchmark-serialization-clean`)
benchmark-serialization-clean:
    #!/usr/bin/env bash
    set -e
    echo "==> Cleaning WAMP serialization benchmark artifacts..."

    # Remove the benchmark build directory if it was ever created;
    # otherwise just report that there is nothing to do.
    BUILD_DIR="examples/benchmarks/serialization/build"
    if [ ! -d "${BUILD_DIR}" ]; then
        echo "ℹ️  No benchmark artifacts to clean (build directory doesn't exist)"
    else
        rm -rf "${BUILD_DIR}"
        echo "✅ Removed examples/benchmarks/serialization/build/"
    fi

# -----------------------------------------------------------------------------
# -- WebSocket compliance testing
# -----------------------------------------------------------------------------

# Run Autobahn WebSocket Testsuite in fuzzingserver mode.
wstest-fuzzingserver config_dir="" output_dir="" mode="quick":
    #!/usr/bin/env bash
    set -e
    # Resolve config/output directories, falling back to project defaults
    CONFIG_DIR="{{ config_dir }}"
    if [ -z "${CONFIG_DIR}" ]; then
        echo "==> No wstest config directory specified. Using default {{AUTOBAHN_TESTSUITE_CONFIG_DIR}}..."
        CONFIG_DIR="{{AUTOBAHN_TESTSUITE_CONFIG_DIR}}"
    fi
    OUTPUT_DIR="{{ output_dir }}"
    if [ -z "${OUTPUT_DIR}" ]; then
        echo "==> No wstest output directory specified. Using default {{AUTOBAHN_TESTSUITE_OUTPUT_DIR}}..."
        OUTPUT_DIR="{{AUTOBAHN_TESTSUITE_OUTPUT_DIR}}"
    fi
    TEST_MODE="{{ mode }}"
    if [ "${TEST_MODE}" != "quick" ] && [ "${TEST_MODE}" != "full" ]; then
        echo "Error: mode must be 'quick' or 'full', got: ${TEST_MODE}"
        exit 1
    fi
    # Pre-create the output directory (as wstest-fuzzingclient does) so the
    # docker volume mount doesn't create it root-owned on the host.
    mkdir -p "${OUTPUT_DIR}"
    echo ""
    echo "Using Docker image: {{AUTOBAHN_TESTSUITE_IMAGE}}"
    echo "Using config directory: ${CONFIG_DIR}"
    echo "Using output directory: ${OUTPUT_DIR}"
    echo "Using test mode: ${TEST_MODE}"
    echo ""
    sudo docker run -i --rm \
        -v "${CONFIG_DIR}:/config" \
        -v "${OUTPUT_DIR}:/reports" \
        -p 9001:9001 \
        --name fuzzingserver \
        "{{AUTOBAHN_TESTSUITE_IMAGE}}" \
        wstest -m fuzzingserver -s /config/fuzzingserver-${TEST_MODE}.json

# Run Autobahn|Testsuite in fuzzingclient mode (tests autobahn-python servers)
wstest-fuzzingclient config_dir="" output_dir="" mode="quick":
    #!/usr/bin/env bash
    set -e
    # Resolve config/output directories, falling back to project defaults
    CONFIG_DIR="{{ config_dir }}"
    if [ -z "${CONFIG_DIR}" ]; then
        echo "==> No wstest config directory specified. Using default {{AUTOBAHN_TESTSUITE_CONFIG_DIR}}..."
        CONFIG_DIR="{{AUTOBAHN_TESTSUITE_CONFIG_DIR}}"
    fi
    OUTPUT_DIR="{{ output_dir }}"
    if [ -z "${OUTPUT_DIR}" ]; then
        echo "==> No wstest output directory specified. Using default {{AUTOBAHN_TESTSUITE_OUTPUT_DIR}}..."
        OUTPUT_DIR="{{AUTOBAHN_TESTSUITE_OUTPUT_DIR}}"
    fi
    TEST_MODE="{{ mode }}"
    if [ "${TEST_MODE}" != "quick" ] && [ "${TEST_MODE}" != "full" ]; then
        echo "Error: mode must be 'quick' or 'full', got: ${TEST_MODE}"
        exit 1
    fi
    echo "==> Creating wstest output directory: ${OUTPUT_DIR}"
    mkdir -p "${OUTPUT_DIR}"
    echo "==> Pulling Autobahn|Testsuite Docker image..."
    sudo docker pull "{{AUTOBAHN_TESTSUITE_IMAGE}}"
    echo "==> Running Autobahn|Testsuite in fuzzingclient mode..."
    echo "==> Using test mode: ${TEST_MODE}"
    # NOTE(review): an earlier comment claimed non-zero exits are ignored via a
    # leading hyphen, but no hyphen is used — and this is a shebang recipe with
    # `set -e`, so a wstest failure aborts the recipe. Confirm intended behavior.
    sudo docker run -i --rm \
        --network host \
        -v "${CONFIG_DIR}":/config \
        -v "${OUTPUT_DIR}":/reports \
        --name fuzzingclient \
        "{{AUTOBAHN_TESTSUITE_IMAGE}}" \
        wstest -m fuzzingclient -s /config/fuzzingclient-${TEST_MODE}.json

# Connects to a running wstest fuzzingserver (see wstest-fuzzingserver).
# Run Autobahn|Python WebSocket client on Twisted
wstest-testeeclient-twisted venv="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Running Autobahn|Python WebSocket client on Twisted in ${VENV_NAME}..."

    ${VENV_PYTHON} ./wstest/testee_client_tx.py

# Connects to a running wstest fuzzingserver (see wstest-fuzzingserver).
# Run Autobahn|Python WebSocket client on asyncio
wstest-testeeclient-asyncio venv="": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Running Autobahn|Python WebSocket client on asyncio in ${VENV_NAME}..."

    ${VENV_PYTHON} ./wstest/testee_client_aio.py

# Target server for wstest-fuzzingclient; listens on the given ws:// url.
# Run Autobahn|Python WebSocket server on Twisted
wstest-testeeserver-twisted venv="" url="ws://127.0.0.1:9011": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Running Autobahn|Python WebSocket server on Twisted in ${VENV_NAME} at {{ url }}..."
    ${VENV_PYTHON} ./wstest/testee_server_tx.py --url "{{ url }}"

# Target server for wstest-fuzzingclient; listens on the given ws:// url.
# Run Autobahn|Python WebSocket server on asyncio
wstest-testeeserver-asyncio venv="" url="ws://127.0.0.1:9012": (install-tools venv) (install-dev venv)
    #!/usr/bin/env bash
    set -e
    # Resolve target venv: explicit name, or auto-detected from system Python.
    VENV_NAME="{{ venv }}"
    if [ -z "${VENV_NAME}" ]; then
        echo "==> No venv name specified. Auto-detecting from system Python..."
        VENV_NAME=$(just --quiet _get-system-venv-name)
        echo "==> Defaulting to venv: '${VENV_NAME}'"
    fi
    VENV_PATH="{{ VENV_DIR }}/${VENV_NAME}"
    VENV_PYTHON=$(just --quiet _get-venv-python "${VENV_NAME}")
    echo "==> Running Autobahn|Python WebSocket server on asyncio in ${VENV_NAME} at {{ url }}..."
    ${VENV_PYTHON} ./wstest/testee_server_aio.py --url "{{ url }}"

# Copies .wstest/ client/server conformance reports into docs/_static and
# zips the JSON reports for download from the documentation site.
# Consolidate WebSocket test reports for local documentation
wstest-consolidate-reports:
    #!/usr/bin/env bash
    set -e
    echo "==> Consolidating WebSocket conformance test reports for documentation..."

    # Ensure target directories exists
    mkdir -p docs/_static/websocket/conformance
    mkdir -p docs/_static/websocket/conformance/clients
    mkdir -p docs/_static/websocket/conformance/servers

    # Copy client and server HTML reports to docs/_static
    # (`|| true`: an empty source dir makes cp fail; that's non-fatal here)
    if [ -d ".wstest/clients" ]; then
        echo "==> Copying client test reports..."
        cp -r .wstest/clients/* docs/_static/websocket/conformance/clients/ || true
    else
        echo "⚠️  No client test reports found in .wstest/clients"
    fi

    if [ -d ".wstest/servers" ]; then
        echo "==> Copying server test reports..."
        cp -r .wstest/servers/* docs/_static/websocket/conformance/servers/ || true
    else
        echo "⚠️  No server test reports found in .wstest/servers"
    fi

    # Create ZIP archive of all clients JSON test reports
    echo "==> Creating clients JSON reports archive..."
    find docs/_static/websocket/conformance/clients -name "*.json" -type f > json_files.txt
    if [ -s json_files.txt ]; then
        json_count=$(wc -l < json_files.txt)
        echo "Found ${json_count} clients JSON test report files"
        zip -r "docs/_static/websocket/conformance/autobahn-python-websocket-client-conformance.zip" -@ < json_files.txt
        echo "✅ Created autobahn-python-websocket-client-conformance.zip with ${json_count} JSON files"
        rm json_files.txt
    else
        echo "⚠️  No clients JSON test report files found"
        rm -f json_files.txt
    fi

    # Create ZIP archive of all servers JSON test reports
    echo "==> Creating servers JSON reports archive..."
    find docs/_static/websocket/conformance/servers -name "*.json" -type f > json_files.txt
    if [ -s json_files.txt ]; then
        json_count=$(wc -l < json_files.txt)
        echo "Found ${json_count} servers JSON test report files"
        zip -r "docs/_static/websocket/conformance/autobahn-python-websocket-server-conformance.zip" -@ < json_files.txt
        # Fixed: message previously said "...conformances.zip", which did not
        # match the actual archive name created above.
        echo "✅ Created autobahn-python-websocket-server-conformance.zip with ${json_count} JSON files"
        rm json_files.txt
    else
        echo "⚠️  No servers JSON test report files found"
        rm -f json_files.txt
    fi

    echo "✅ Test reports consolidated for documentation"
    echo "📄 HTML reports: docs/_static/websocket/conformance/"

    sync docs/_static/websocket/conformance/
    du -hs docs/_static/websocket/conformance/

# Download GitHub release artifacts (usage: `just download-github-release` for nightly, or `just download-github-release stable`)
# Downloads wheels, sdist, conformance reports, FlatBuffers schemas, and verifies checksums
# This is the unified download recipe for both docs integration and release notes generation
download-github-release release_type="nightly":
    #!/usr/bin/env bash
    # Download all artifacts of a GitHub release (wheels, sdist, conformance
    # reports, FlatBuffers schemas, chain-of-custody files) into
    # /tmp/release-artifacts/<tag>/, verify SHA-256 checksums when a checksum
    # file is present, and unpack the documentation artifacts so that
    # `just docs-integrate-github-release` can pick them up.
    #
    # release_type: "nightly" (default)   -> newest master-YYYYMMDDHHMM tag
    #               "stable" | "latest"   -> latest published release
    #               "development" | "dev" -> newest fork-* tag
    #               anything else         -> treated as an explicit release tag
    set -euo pipefail

    RELEASE_TYPE="{{ release_type }}"
    REPO="crossbario/autobahn-python"

    echo ""
    echo "════════════════════════════════════════════════════════════"
    echo "  Downloading GitHub Release Artifacts"
    echo "════════════════════════════════════════════════════════════"
    echo ""
    echo "Release type: ${RELEASE_TYPE}"
    echo ""

    # Check if gh is available and authenticated
    if ! command -v gh &> /dev/null; then
        echo "❌ ERROR: GitHub CLI (gh) is not installed"
        echo "   Install: https://cli.github.com/"
        exit 1
    fi

    if ! gh auth status &> /dev/null; then
        echo "❌ ERROR: GitHub CLI is not authenticated"
        echo "   Run: gh auth login"
        exit 1
    fi

    # Determine which release tag to download.
    # NOTE: the grep/head pipelines below end with `|| true` — with
    # `set -euo pipefail`, a non-matching grep would otherwise abort the
    # whole script before we can print the helpful error message.
    case "${RELEASE_TYPE}" in
        nightly)
            echo "==> Finding latest nightly release (tagged as master-YYYYMMDDHHMM)..."
            RELEASE_TAG=$(gh release list --repo "${REPO}" --limit 20 \
              | grep -o 'master-[0-9]*' \
              | head -1 || true)
            if [ -z "$RELEASE_TAG" ]; then
                echo "❌ ERROR: No nightly release found"
                exit 1
            fi
            echo "✅ Found nightly release: $RELEASE_TAG"
            ;;

        stable|latest)
            echo "==> Finding latest stable release..."
            RELEASE_TAG=$(gh release view --repo "${REPO}" --json tagName -q '.tagName' 2>/dev/null || true)
            if [ -z "$RELEASE_TAG" ]; then
                echo "❌ ERROR: No stable release found"
                exit 1
            fi
            echo "✅ Found stable release: $RELEASE_TAG"
            ;;

        development|dev)
            echo "==> Finding latest development release (tagged as fork-*)..."
            RELEASE_TAG=$(gh release list --repo "${REPO}" --limit 20 \
              | grep -o 'fork-[^[:space:]]*' \
              | head -1 || true)
            if [ -z "$RELEASE_TAG" ]; then
                echo "❌ ERROR: No development release found"
                exit 1
            fi
            echo "✅ Found development release: $RELEASE_TAG"
            ;;

        *)
            # Treat as explicit tag name
            RELEASE_TAG="${RELEASE_TYPE}"
            echo "==> Using explicit release tag: $RELEASE_TAG"
            ;;
    esac

    # Verify release exists
    echo ""
    echo "==> Verifying release exists..."
    if ! gh release view "${RELEASE_TAG}" --repo "${REPO}" &> /dev/null; then
        echo "❌ ERROR: Release '${RELEASE_TAG}' not found"
        echo ""
        echo "Available releases:"
        gh release list --repo "${REPO}" --limit 10
        exit 1
    fi
    echo "✅ Release found"

    # Create download directory (use /tmp/release-artifacts/ for compatibility with generate-release-notes)
    DOWNLOAD_DIR="/tmp/release-artifacts/${RELEASE_TAG}"
    if [ -d "${DOWNLOAD_DIR}" ]; then
        echo ""
        echo "==> Cleaning existing directory: ${DOWNLOAD_DIR}"
        rm -rf "${DOWNLOAD_DIR}"
    fi
    mkdir -p "${DOWNLOAD_DIR}"

    echo ""
    echo "==> Downloading all release artifacts using gh..."
    gh release download "${RELEASE_TAG}" \
        --repo "${REPO}" \
        --dir "${DOWNLOAD_DIR}" \
        --pattern "*" \
        --clobber

    cd "${DOWNLOAD_DIR}"

    # Count different types of files. `find` exits 0 even when nothing
    # matches, so these counts are pipefail-safe (unlike `ls *.whl | wc -l`,
    # which under pipefail fails AND triggers a fallback, yielding "0\n0").
    WHEEL_COUNT=$(find . -maxdepth 1 -type f -name '*.whl' | wc -l)
    TARBALL_COUNT=$(find . -maxdepth 1 -type f -name '*.tar.gz' | wc -l)
    CHECKSUM_COUNT=$(find . -maxdepth 1 -type f -name '*CHECKSUMS*' | wc -l)

    echo ""
    echo "==> Downloaded assets:"
    ls -la
    echo ""
    echo "==> Asset summary:"
    echo "    Wheels:     ${WHEEL_COUNT}"
    echo "    Tarballs:   ${TARBALL_COUNT}"
    echo "    Checksums:  ${CHECKSUM_COUNT}"

    # Verify checksums if available (first matching file wins)
    CHECKSUM_FILE=""
    for f in CHECKSUMS.sha256 wheels-CHECKSUMS.sha256 docker-CHECKSUMS.sha256; do
        if [ -f "$f" ]; then
            CHECKSUM_FILE="$f"
            break
        fi
    done

    if [ -n "${CHECKSUM_FILE}" ]; then
        echo ""
        echo "==> Verifying checksums from ${CHECKSUM_FILE}..."
        VERIFIED=0
        FAILED=0
        SKIPPED=0
        while IFS= read -r line; do
            # Skip empty lines
            [ -z "$line" ] && continue

            # Only process lines in OpenSSL digest format:
            #   SHA256(filename)= checksum   or   SHA2-256(filename)= checksum
            # Anything else (headers, stray text) is ignored rather than
            # mis-parsed into a bogus file path.
            case "$line" in
                SHA*256\(*\)=*) ;;
                *) continue ;;
            esac

            FILE_PATH=$(echo "$line" | sed -E 's/^SHA2?-?256\(([^)]+)\)=.*/\1/')
            EXPECTED_CHECKSUM=$(echo "$line" | awk -F'= ' '{print $2}')

            # Handle ./prefix
            FILE_PATH="${FILE_PATH#./}"

            if [ -f "$FILE_PATH" ]; then
                # `openssl sha256` prints "SHA2-256(file)= hash" (OpenSSL 3.x)
                # or "SHA256(file)= hash" (1.1.x); field 2 is the hash either way.
                ACTUAL_CHECKSUM=$(openssl sha256 "$FILE_PATH" | awk '{print $2}')
                if [ "$ACTUAL_CHECKSUM" = "$EXPECTED_CHECKSUM" ]; then
                    VERIFIED=$((VERIFIED + 1))
                else
                    echo "    ❌ MISMATCH: $FILE_PATH"
                    echo "       Expected: $EXPECTED_CHECKSUM"
                    echo "       Actual:   $ACTUAL_CHECKSUM"
                    FAILED=$((FAILED + 1))
                fi
            else
                # Entry refers to a file not present here (e.g. listed in a
                # sub-checksum file or a docker-only artifact) — not an error.
                SKIPPED=$((SKIPPED + 1))
            fi
        done < "${CHECKSUM_FILE}"

        if [ $FAILED -gt 0 ]; then
            echo "    ❌ ERROR: ${FAILED} file(s) failed verification!"
            exit 1
        else
            echo "    ✅ ${VERIFIED} file(s) verified successfully"
            if [ $SKIPPED -gt 0 ]; then
                echo "    (${SKIPPED} entries skipped - files not present or in sub-checksum files)"
            fi
        fi
    else
        echo ""
        echo "⚠️  No checksum file found - skipping verification"
    fi

    echo ""
    echo "==> Extracting documentation artifacts..."

    # Extract conformance reports (for docs integration)
    CONFORMANCE_TARBALL="autobahn-python-websocket-conformance-${RELEASE_TAG}.tar.gz"
    if [ -f "${CONFORMANCE_TARBALL}" ]; then
        mkdir -p conformance-extracted
        tar -xzf "${CONFORMANCE_TARBALL}" -C conformance-extracted
        # Move with-nvx and without-nvx to top level for docs-integrate-github-release
        if [ -d "conformance-extracted/with-nvx" ]; then
            mv conformance-extracted/with-nvx .
        fi
        if [ -d "conformance-extracted/without-nvx" ]; then
            mv conformance-extracted/without-nvx .
        fi
        rm -rf conformance-extracted
        echo "✅ Extracted conformance reports to with-nvx/, without-nvx/"
    else
        echo "⚠️  No conformance tarball found"
    fi

    # Extract FlatBuffers schemas (for docs integration)
    if [ -f "flatbuffers-schema.tar.gz" ]; then
        mkdir -p flatbuffers
        tar -xzf flatbuffers-schema.tar.gz -C flatbuffers
        echo "✅ Extracted FlatBuffers schemas to flatbuffers/"
    else
        echo "⚠️  No FlatBuffers schema tarball found"
    fi

    echo ""
    echo "════════════════════════════════════════════════════════════"
    echo "  ✅ Download Complete"
    echo "════════════════════════════════════════════════════════════"
    echo ""
    echo "Release:  ${RELEASE_TAG}"
    echo "Location: ${DOWNLOAD_DIR}"
    echo ""
    echo "Contents:"
    echo "  - Wheels:             ${DOWNLOAD_DIR}/*.whl"
    echo "  - Source dist:        ${DOWNLOAD_DIR}/autobahn-*.tar.gz"
    echo "  - Conformance:        ${DOWNLOAD_DIR}/with-nvx/, without-nvx/"
    echo "  - FlatBuffers:        ${DOWNLOAD_DIR}/flatbuffers/"
    echo "  - Chain-of-custody:   ${DOWNLOAD_DIR}/*CHECKSUMS*, *VALIDATION*, *build-info*"
    echo ""
    echo "Next steps:"
    echo "  - Docs integration:   just docs-integrate-github-release ${RELEASE_TAG}"
    echo "  - Release notes:      just generate-release-notes <version> ${RELEASE_TAG}"
    echo "  - Changelog:          just prepare-changelog <version>"
    echo ""

# Integrate downloaded GitHub release artifacts into docs build (usage: `just docs-integrate-github-release` or `just docs-integrate-github-release master-202510180103`)
docs-integrate-github-release release_tag="":
    #!/usr/bin/env bash
    # Copy previously downloaded release artifacts (see
    # `just download-github-release`) from /tmp/release-artifacts/<tag>/ into
    # the already-built Sphinx output under docs/_build/html/_static/.
    # When no tag is given, the most recently downloaded artifact set is used.
    set -e

    RELEASE_TAG="{{ release_tag }}"

    # Check that docs have been built first
    if [ ! -d "docs/_build/html" ]; then
        echo "❌ ERROR: Documentation not built yet"
        echo ""
        echo "Please build documentation first using:"
        echo "  just docs"
        echo ""
        echo "Then integrate artifacts with:"
        echo "  just docs-integrate-github-release"
        echo ""
        exit 1
    fi

    # If no tag specified, find the most recently downloaded artifacts
    if [ -z "${RELEASE_TAG}" ]; then
        echo "==> No release tag specified. Finding latest downloaded artifacts..."
        # -mindepth 1 excludes /tmp/release-artifacts itself: creating a
        # subdirectory bumps the parent's mtime, so without it the parent
        # would sort first and we would wrongly report "no artifacts".
        # (-printf is GNU find; this recipe targets Linux.)
        LATEST_DIR=$(find /tmp/release-artifacts -mindepth 1 -maxdepth 1 -type d -printf "%T@ %p\n" 2>/dev/null \
          | sort -rn \
          | head -1 \
          | cut -d' ' -f2-)

        if [ -z "${LATEST_DIR}" ]; then
            echo "❌ ERROR: No downloaded release artifacts found in /tmp/release-artifacts/"
            echo ""
            echo "Please download artifacts first using:"
            echo "  just download-github-release"
            echo ""
            exit 1
        fi

        RELEASE_TAG=$(basename "${LATEST_DIR}")
        echo "✅ Found latest downloaded artifacts: ${RELEASE_TAG}"
    fi

    DOWNLOAD_DIR="/tmp/release-artifacts/${RELEASE_TAG}"

    if [ ! -d "${DOWNLOAD_DIR}" ]; then
        echo "❌ ERROR: Release artifacts not found at: ${DOWNLOAD_DIR}"
        echo ""
        echo "Please download artifacts first using:"
        echo "  just download-github-release ${RELEASE_TAG}"
        echo ""
        exit 1
    fi

    echo "==> Integrating GitHub release artifacts into built documentation..."
    echo "    Release: ${RELEASE_TAG}"
    echo "    Source: ${DOWNLOAD_DIR}"
    echo "    Target: docs/_build/html/_static/"
    echo ""

    # Create target directories in the BUILT docs
    echo "==> Creating target directories in docs/_build/html/_static/..."
    mkdir -p docs/_build/html/_static/websocket/conformance/with-nvx
    mkdir -p docs/_build/html/_static/websocket/conformance/without-nvx
    mkdir -p docs/_build/html/_static/flatbuffers

    # Copy conformance reports (with-nvx)
    if [ -d "${DOWNLOAD_DIR}/with-nvx" ]; then
        echo "==> Copying conformance reports (with NVX)..."
        cp -r "${DOWNLOAD_DIR}/with-nvx"/* docs/_build/html/_static/websocket/conformance/with-nvx/ 2>/dev/null || true
        FILE_COUNT=$(find docs/_build/html/_static/websocket/conformance/with-nvx -type f | wc -l)
        echo "✅ Copied ${FILE_COUNT} files to docs/_build/html/_static/websocket/conformance/with-nvx/"
    else
        echo "⚠️  No with-nvx conformance reports found in ${DOWNLOAD_DIR}"
    fi

    # Copy conformance reports (without-nvx)
    if [ -d "${DOWNLOAD_DIR}/without-nvx" ]; then
        echo "==> Copying conformance reports (without NVX)..."
        cp -r "${DOWNLOAD_DIR}/without-nvx"/* docs/_build/html/_static/websocket/conformance/without-nvx/ 2>/dev/null || true
        FILE_COUNT=$(find docs/_build/html/_static/websocket/conformance/without-nvx -type f | wc -l)
        echo "✅ Copied ${FILE_COUNT} files to docs/_build/html/_static/websocket/conformance/without-nvx/"
    else
        echo "⚠️  No without-nvx conformance reports found in ${DOWNLOAD_DIR}"
    fi

    # Copy FlatBuffers schemas (source .fbs files)
    echo "==> Copying FlatBuffers source schemas (.fbs)..."
    if [ -d "${DOWNLOAD_DIR}/flatbuffers" ]; then
        # New structure: .fbs files are in flatbuffers/ subdirectory
        FBS_COUNT=$(find "${DOWNLOAD_DIR}/flatbuffers" -name "*.fbs" -type f 2>/dev/null | wc -l)
        if [ "${FBS_COUNT}" -gt 0 ]; then
            cp "${DOWNLOAD_DIR}/flatbuffers"/*.fbs docs/_build/html/_static/flatbuffers/ 2>/dev/null || true
            echo "✅ Copied ${FBS_COUNT} .fbs files to docs/_build/html/_static/flatbuffers/"
        else
            echo "⚠️  No .fbs files found in ${DOWNLOAD_DIR}/flatbuffers"
        fi
    else
        # Legacy structure: .fbs files in top-level directory
        FBS_COUNT=$(find "${DOWNLOAD_DIR}" -maxdepth 1 -name "*.fbs" -type f 2>/dev/null | wc -l)
        if [ "${FBS_COUNT}" -gt 0 ]; then
            cp "${DOWNLOAD_DIR}"/*.fbs docs/_build/html/_static/flatbuffers/ 2>/dev/null || true
            echo "✅ Copied ${FBS_COUNT} .fbs files to docs/_build/html/_static/flatbuffers/"
        else
            echo "⚠️  No .fbs files found in ${DOWNLOAD_DIR}"
        fi
    fi

    # Copy FlatBuffers binary schemas (.bfbs files)
    echo "==> Copying FlatBuffers binary schemas (.bfbs)..."
    if [ -d "${DOWNLOAD_DIR}/gen/schema" ]; then
        # New structure: .bfbs files are in gen/schema/ subdirectory
        BFBS_COUNT=$(find "${DOWNLOAD_DIR}/gen/schema" -name "*.bfbs" -type f 2>/dev/null | wc -l)
        if [ "${BFBS_COUNT}" -gt 0 ]; then
            cp "${DOWNLOAD_DIR}/gen/schema"/*.bfbs docs/_build/html/_static/flatbuffers/ 2>/dev/null || true
            echo "✅ Copied ${BFBS_COUNT} .bfbs files to docs/_build/html/_static/flatbuffers/"
        else
            echo "⚠️  No .bfbs files found in ${DOWNLOAD_DIR}/gen/schema"
        fi
    else
        # Legacy structure: .bfbs files in top-level directory
        BFBS_COUNT=$(find "${DOWNLOAD_DIR}" -maxdepth 1 -name "*.bfbs" -type f 2>/dev/null | wc -l)
        if [ "${BFBS_COUNT}" -gt 0 ]; then
            cp "${DOWNLOAD_DIR}"/*.bfbs docs/_build/html/_static/flatbuffers/ 2>/dev/null || true
            echo "✅ Copied ${BFBS_COUNT} .bfbs files to docs/_build/html/_static/flatbuffers/"
        else
            echo "⚠️  No .bfbs files found in ${DOWNLOAD_DIR}"
        fi
    fi

    # Copy chain-of-custody / verification files
    echo "==> Copying chain-of-custody files..."
    mkdir -p docs/_build/html/_static/release
    CUSTODY_COUNT=0
    # NOTE: ${DOWNLOAD_DIR}/${pattern} is deliberately unquoted so the glob
    # expands; the [ -f "$f" ] guard skips the literal pattern on no match.
    for pattern in "*CHECKSUMS.sha256" "*VALIDATION.txt" "*build-info.txt" "*.verify.txt"; do
        for f in ${DOWNLOAD_DIR}/${pattern}; do
            if [ -f "$f" ]; then
                cp "$f" docs/_build/html/_static/release/
                CUSTODY_COUNT=$((CUSTODY_COUNT + 1))
            fi
        done
    done
    if [ "${CUSTODY_COUNT}" -gt 0 ]; then
        echo "✅ Copied ${CUSTODY_COUNT} chain-of-custody files to docs/_build/html/_static/release/"
    else
        echo "⚠️  No chain-of-custody files found in ${DOWNLOAD_DIR}"
    fi

    echo ""
    echo "════════════════════════════════════════════════════════════"
    echo "✅ GitHub release artifacts integrated into built documentation"
    echo "════════════════════════════════════════════════════════════"
    echo ""
    echo "Integrated artifacts from: ${RELEASE_TAG}"
    echo "Target location: docs/_build/html/_static/"
    echo ""
    echo "Contents integrated:"
    echo "  - Conformance reports: docs/_build/html/_static/websocket/conformance/"
    echo "  - FlatBuffers schemas: docs/_build/html/_static/flatbuffers/"
    echo "  - Chain-of-custody:    docs/_build/html/_static/release/"
    echo ""
    echo "Next steps:"
    echo "  1. View documentation: just docs-view"
    echo "  2. Check conformance reports at: http://localhost:8000/websocket/conformance.html"
    echo "  3. Check FlatBuffers schemas at: http://localhost:8000/wamp/serialization.html"
    echo ""


# -----------------------------------------------------------------------------
# -- Release workflow recipes
# -----------------------------------------------------------------------------

# Generate a changelog entry from git history, audit files, and GitHub issues;
# thin wrapper that delegates to .cicd/scripts/prepare-changelog.sh with this
# repository hard-wired as the second argument.
# Usage: just prepare-changelog 25.12.1
# Requires: gh CLI authenticated (for fetching issue titles)
prepare-changelog version:
    .cicd/scripts/prepare-changelog.sh "{{ version }}" "crossbario/autobahn-python"

# Generate a release-notes entry from previously downloaded release artifacts;
# thin wrapper that delegates to .cicd/scripts/generate-release-notes.sh with
# this repository hard-wired as the third argument.
# Usage: just generate-release-notes 25.12.1 master-202512092131
# Requires: artifacts downloaded via `just download-github-release`
generate-release-notes version release_name:
    .cicd/scripts/generate-release-notes.sh "{{ version }}" "{{ release_name }}" "crossbario/autobahn-python"

# Validate release is ready: checks changelog, release notes, version, tests, docs
draft-release version:
    #!/usr/bin/env bash
    # Pre-release validation gate: verifies that pyproject.toml,
    # docs/changelog.rst, and docs/release-notes.rst all agree on the given
    # version string. Prints a per-check summary and exits non-zero if any
    # check fails (used by `just prepare-release`).
    set -e
    VERSION="{{ version }}"

    echo ""
    echo "=========================================="
    echo " Validating release ${VERSION}"
    echo "=========================================="
    echo ""

    ERRORS=0

    # Check pyproject.toml version
    PYPROJECT_VERSION=$(grep '^version' pyproject.toml | head -1 | sed 's/.*= *"\(.*\)"/\1/')
    if [ "${PYPROJECT_VERSION}" = "${VERSION}" ]; then
        echo "✅ pyproject.toml version matches: ${VERSION}"
    else
        echo "❌ pyproject.toml version mismatch: ${PYPROJECT_VERSION} != ${VERSION}"
        ERRORS=$((ERRORS + 1))
    fi

    # Check changelog entry: a line consisting of exactly the version.
    # -F treats the version as a fixed string (dots in "25.12.1" are not
    # regex wildcards), -x requires a whole-line match.
    if grep -qxF "${VERSION}" docs/changelog.rst; then
        echo "✅ Changelog entry exists for ${VERSION}"
    else
        echo "❌ Changelog entry missing for ${VERSION}"
        ERRORS=$((ERRORS + 1))
    fi

    # Check release notes entry: a line STARTING with the version (it may be
    # followed by a date etc.). awk's index() does a fixed-string prefix
    # match, again avoiding regex interpretation of the dots.
    if awk -v v="${VERSION}" 'index($0, v) == 1 { found = 1; exit } END { exit !found }' docs/release-notes.rst; then
        echo "✅ Release notes entry exists for ${VERSION}"
    else
        echo "❌ Release notes entry missing for ${VERSION}"
        ERRORS=$((ERRORS + 1))
    fi

    echo ""
    if [ ${ERRORS} -gt 0 ]; then
        echo "=========================================="
        echo " ❌ Validation failed with ${ERRORS} error(s)"
        echo "=========================================="
        exit 1
    else
        echo "=========================================="
        echo " ✅ All checks passed for ${VERSION}"
        echo "=========================================="
    fi

# Full release preparation: validate + test + build docs
prepare-release version venv="":
    #!/usr/bin/env bash
    # End-to-end release readiness check: run the draft-release validation,
    # then the test suite (optionally inside the given venv), then the docs
    # build. On success, print the git commands to cut the release.
    set -e
    version="{{ version }}"
    venv="{{ venv }}"

    echo ""
    echo "=========================================="
    echo " Preparing release ${version}"
    echo "=========================================="
    echo ""

    # Validation gate first - abort early if the release metadata is not ready.
    just draft-release "${version}"

    echo ""
    echo "==> Running tests..."
    if [ -z "${venv}" ]; then
        just test
    else
        just test "${venv}"
    fi

    echo ""
    echo "==> Building documentation..."
    just docs

    echo ""
    echo "=========================================="
    echo " ✅ Release ${version} is ready"
    echo "=========================================="
    echo ""
    echo "Next steps:"
    echo "  1. git add docs/changelog.rst docs/release-notes.rst pyproject.toml"
    echo "  2. git commit -m \"Release ${version}\""
    echo "  3. git tag v${version}"
    echo "  4. git push && git push --tags"
    echo ""