diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 60cd101..db8c9b4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,7 @@ name: CI on: + pull_request: push: branches: - "**" @@ -9,59 +10,48 @@ on: permissions: contents: read - packages: write jobs: - test-and-publish: + security: + name: Run security workflow + uses: ./.github/workflows/security.yml + + tests: + name: Run test workflow + uses: ./.github/workflows/tests.yml + + lint: + name: Run lint workflow + uses: ./.github/workflows/lint.yml + + publish: + name: Publish image runs-on: ubuntu-latest - env: - PORT: "5000" + needs: + - security + - tests + - lint + if: github.event_name == 'push' + permissions: + contents: read + packages: write steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - - name: Install Python dependencies - run: pip install -r app/requirements.txt - - - name: Prepare app config for CI - run: cp app/config.sample.yaml app/config.yaml - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: "20" - cache: npm - cache-dependency-path: app/package.json - - - name: Install Node dependencies - working-directory: app - run: npm install - - - name: Run Cypress tests - uses: cypress-io/github-action@v6 - with: - working-directory: app - install: false - start: python app.py - wait-on: http://127.0.0.1:5000 - wait-on-timeout: 120 - - name: Detect semver tag on current commit id: semver run: | SEMVER_TAG="$(git tag --points-at "$GITHUB_SHA" | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1 || true)" if [ -n "$SEMVER_TAG" ]; then - echo "found=true" >> "$GITHUB_OUTPUT" - echo "raw_tag=$SEMVER_TAG" >> "$GITHUB_OUTPUT" - echo "version=${SEMVER_TAG#v}" >> "$GITHUB_OUTPUT" + { + echo "found=true" + echo "raw_tag=$SEMVER_TAG" + echo "version=${SEMVER_TAG#v}" + } >> "$GITHUB_OUTPUT" 
else echo "found=false" >> "$GITHUB_OUTPUT" fi diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..b479192 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,77 @@ +name: Lint + +on: + workflow_call: + workflow_dispatch: + +permissions: + contents: read + +jobs: + lint-actions: + name: Lint GitHub Actions + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Run actionlint + run: docker run --rm -v "$PWD:/repo" -w /repo rhysd/actionlint:latest + + lint-python: + name: Lint Python + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.12" + + - name: Install lint dependencies + run: | + python -m pip install --upgrade pip + pip install ".[dev]" + + - name: Ruff lint + run: ruff check . + + - name: Ruff format check + run: ruff format --check . + + lint-docker: + name: Lint Dockerfile + runs-on: ubuntu-latest + permissions: + contents: read + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Run hadolint + id: hadolint + continue-on-error: true + uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 + with: + dockerfile: ./Dockerfile + format: sarif + output-file: hadolint-results.sarif + failure-threshold: warning + + - name: Upload hadolint SARIF + if: always() && github.event_name == 'push' + uses: github/codeql-action/upload-sarif@v4 + with: + sarif_file: hadolint-results.sarif + wait-for-processing: true + category: hadolint + + - name: Fail on hadolint warnings + if: always() + run: python3 utils/check_hadolint_sarif.py hadolint-results.sarif diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml new file mode 100644 index 0000000..7186c6d --- /dev/null +++ b/.github/workflows/security.yml @@ -0,0 +1,48 @@ +name: Security + +on: + 
workflow_call: + +permissions: + contents: read + +jobs: + analyze: + name: Run security scan + runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} + permissions: + contents: read + packages: read + security-events: write + strategy: + fail-fast: false + matrix: + include: + - language: actions + build-mode: none + - language: javascript-typescript + build-mode: none + - language: python + build-mode: none + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v4 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + queries: security-extended,security-and-quality + + - name: Run manual build steps + if: matrix.build-mode == 'manual' + run: | + echo "No manual build is configured for this repository." + exit 1 + + - name: Perform CodeQL analysis + uses: github/codeql-action/analyze@v4 + with: + category: /language:${{ matrix.language }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..8081b74 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,194 @@ +name: Tests + +on: + workflow_call: + workflow_dispatch: + +permissions: + contents: read + +jobs: + test-lint: + name: Run lint tests + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.12" + + - name: Run lint test suite + run: python -m unittest discover -s tests/lint -t . + + test-integration: + name: Run integration tests + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.12" + + - name: Install integration test dependencies + run: | + python -m pip install --upgrade pip + pip install --ignore-installed . 
+ + - name: Run integration test suite + run: python -m unittest discover -s tests/integration -t . + + test-unit: + name: Run unit tests + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.12" + + - name: Install unit test dependencies + run: | + python -m pip install --upgrade pip + pip install --ignore-installed . + + - name: Run unit test suite + run: python -m unittest discover -s tests/unit -t . + + security-python: + name: Run Python security checks + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.12" + + - name: Install security dependencies + run: | + python -m pip install --upgrade pip + pip install --ignore-installed ".[dev]" + + - name: Run Bandit + run: python -m bandit -q -ll -ii -r app main.py + + - name: Export runtime requirements + run: python utils/export_runtime_requirements.py > runtime-requirements.txt + + - name: Audit Python runtime dependencies + run: python -m pip_audit -r runtime-requirements.txt + + test-security: + name: Run security guardrail tests + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.12" + + - name: Install security test dependencies + run: | + python -m pip install --upgrade pip + pip install --ignore-installed . + + - name: Run security test suite + run: python -m unittest discover -s tests/security -t . 
+ + e2e: + name: Run end-to-end tests + runs-on: ubuntu-latest + needs: + - test-lint + - test-unit + - test-integration + - security-python + - test-security + env: + FLASK_HOST: "127.0.0.1" + FLASK_PORT: "5001" + PORT: "5001" + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.12" + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install --ignore-installed . + + - name: Prepare app config for CI + run: cp app/config.sample.yaml app/config.yaml + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: npm + cache-dependency-path: app/package.json + + - name: Install Node dependencies + working-directory: app + run: npm install + + - name: Install Cypress system dependencies + run: | + sudo apt-get update + sudo apt-get install -y \ + libasound2t64 \ + libatk-bridge2.0-0 \ + libatk1.0-0 \ + libatspi2.0-0t64 \ + libcups2t64 \ + libdrm2 \ + libgbm1 \ + libglib2.0-0t64 \ + libgtk-3-0t64 \ + libnotify4 \ + libnspr4 \ + libnss3 \ + libpango-1.0-0 \ + libpangocairo-1.0-0 \ + libxcomposite1 \ + libxdamage1 \ + libxfixes3 \ + libxkbcommon0 \ + libxrandr2 \ + libxss1 \ + libxtst6 \ + xauth \ + xvfb + + - name: Run Cypress tests + uses: cypress-io/github-action@v6 + with: + working-directory: app + install: false + start: python app.py + wait-on: http://127.0.0.1:5001 + wait-on-timeout: 120 diff --git a/.gitignore b/.gitignore index f3d35c3..0efabcf 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,10 @@ app/config.yaml *__pycache__* app/static/cache/* .env -app/cypress/screenshots/* \ No newline at end of file +app/cypress/screenshots/* +.ruff_cache/ +app/node_modules/ +hadolint-results.sarif +build/ +*.egg-info/ +app/core.* diff --git a/Dockerfile b/Dockerfile index 2ac2e12..2c6aee4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,14 +1,16 @@ -# Base image for Python -FROM python:slim +FROM 
python:3.12-slim + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + FLASK_HOST=0.0.0.0 + +WORKDIR /tmp/build + +COPY pyproject.toml README.md main.py ./ +COPY app ./app +RUN python -m pip install --no-cache-dir . -# Set the working directory WORKDIR /app - -# Copy and install dependencies -COPY app/requirements.txt requirements.txt -RUN pip install --no-cache-dir -r requirements.txt - -# Copy application code COPY app/ . CMD ["python", "app.py"] diff --git a/MIRRORS b/MIRRORS new file mode 100644 index 0000000..41b138d --- /dev/null +++ b/MIRRORS @@ -0,0 +1,2 @@ +https://pypi.org/project/portfolio-ui/ +git@github.com:kevinveenbirkenbach/port-ui.git diff --git a/Makefile b/Makefile index cc61073..6228ca0 100644 --- a/Makefile +++ b/Makefile @@ -7,6 +7,8 @@ endif # Default port (can be overridden with PORT env var) PORT ?= 5000 +PYTHON ?= python3 +ACT ?= act # Default port (can be overridden with PORT env var) .PHONY: build @@ -14,10 +16,15 @@ build: # Build the Docker image. docker build -t application-portfolio . +.PHONY: build-no-cache +build-no-cache: + # Build the Docker image without cache. + docker build --no-cache -t application-portfolio . + .PHONY: up up: # Start the application using docker-compose with build. - docker-compose up -d --build + docker-compose up -d --build --force-recreate .PHONY: down down: @@ -75,8 +82,93 @@ browse: # Open the application in the browser at http://localhost:$(PORT) chromium http://localhost:$(PORT) +.PHONY: install +install: + # Install runtime Python dependencies from pyproject.toml. + $(PYTHON) -m pip install -e . + +.PHONY: install-dev +install-dev: + # Install runtime and developer dependencies from pyproject.toml. + $(PYTHON) -m pip install -e ".[dev]" + +.PHONY: npm-install npm-install: + # Install Node.js dependencies for browser tests. cd app && npm install -test: npm-install - cd app && npx cypress run --spec "cypress/e2e/**/*.spec.js" +.PHONY: lint-actions +lint-actions: + # Lint GitHub Actions workflows. 
+ docker run --rm -v "$$PWD:/repo" -w /repo rhysd/actionlint:latest + +.PHONY: lint-python +lint-python: install-dev + # Run Python lint and format checks. + $(PYTHON) -m ruff check . + $(PYTHON) -m ruff format --check . + +.PHONY: lint-docker +lint-docker: + # Lint the Dockerfile. + docker run --rm -i hadolint/hadolint < Dockerfile + +.PHONY: test-lint +test-lint: + # Run lint guardrail tests. + $(PYTHON) -m unittest discover -s tests/lint -t . + +.PHONY: test-integration +test-integration: install + # Run repository integration tests. + $(PYTHON) -m unittest discover -s tests/integration -t . + +.PHONY: test-unit +test-unit: install + # Run repository unit tests. + $(PYTHON) -m unittest discover -s tests/unit -t . + +.PHONY: test-security +test-security: install + # Run repository security guardrail tests. + $(PYTHON) -m unittest discover -s tests/security -t . + +.PHONY: lint +lint: lint-actions lint-python lint-docker test-lint + # Run the full lint suite. + +.PHONY: security +security: install-dev test-security + # Run security checks. + $(PYTHON) -m bandit -q -ll -ii -r app main.py + $(PYTHON) utils/export_runtime_requirements.py > /tmp/portfolio-runtime-requirements.txt + $(PYTHON) -m pip_audit -r /tmp/portfolio-runtime-requirements.txt + +.PHONY: test-e2e +test-e2e: + # Run Cypress end-to-end tests via act (stop portfolio container to free port first). + -docker stop portfolio 2>/dev/null || true + $(ACT) workflow_dispatch -W .github/workflows/tests.yml -j e2e + -docker start portfolio 2>/dev/null || true + +.PHONY: test-workflow +test-workflow: + # Run the GitHub test workflow locally via act. + $(ACT) workflow_dispatch -W .github/workflows/tests.yml + +.PHONY: lint-workflow +lint-workflow: + # Run the GitHub lint workflow locally via act. + $(ACT) workflow_dispatch -W .github/workflows/lint.yml + +.PHONY: quality +quality: lint-workflow test-workflow + # Run the GitHub lint and test workflows locally via act. 
+ +.PHONY: ci +ci: lint security test-unit test-integration test-e2e + # Run the local CI suite. + +.PHONY: test +test: ci + # Run the full validation suite. diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..88ed711 --- /dev/null +++ b/app/__init__.py @@ -0,0 +1 @@ +"""Portfolio UI web application package.""" diff --git a/app/app.py b/app/app.py index 7dd4bd6..b3a0db6 100644 --- a/app/app.py +++ b/app/app.py @@ -1,29 +1,38 @@ -import os -from flask import Flask, render_template -import yaml -import requests -from utils.configuration_resolver import ConfigurationResolver -from utils.cache_manager import CacheManager -from utils.compute_card_classes import compute_card_classes import logging -logging.basicConfig(level=logging.DEBUG) -FLASK_ENV = os.getenv("FLASK_ENV", "production") -FLASK_PORT = int(os.getenv("PORT", 5000)) -print(f"🔧 Starting app on port {FLASK_PORT}, FLASK_ENV={FLASK_ENV}") +import os -from flask import current_app +import requests +import yaml +from flask import Flask, current_app, render_template from markupsafe import Markup +try: + from app.utils.cache_manager import CacheManager + from app.utils.compute_card_classes import compute_card_classes + from app.utils.configuration_resolver import ConfigurationResolver +except ImportError: # pragma: no cover - supports running from the app/ directory. 
+ from utils.cache_manager import CacheManager + from utils.compute_card_classes import compute_card_classes + from utils.configuration_resolver import ConfigurationResolver + +logging.basicConfig(level=logging.DEBUG) + +FLASK_ENV = os.getenv("FLASK_ENV", "production") +FLASK_HOST = os.getenv("FLASK_HOST", "127.0.0.1") +FLASK_PORT = int(os.getenv("FLASK_PORT", os.getenv("PORT", 5000))) +print(f"Starting app on {FLASK_HOST}:{FLASK_PORT}, FLASK_ENV={FLASK_ENV}") + # Initialize the CacheManager cache_manager = CacheManager() # Clear cache on startup cache_manager.clear_cache() + def load_config(app): """Load and resolve the configuration from config.yaml.""" - with open("config.yaml", "r") as f: - config = yaml.safe_load(f) + with open("config.yaml", "r", encoding="utf-8") as handle: + config = yaml.safe_load(handle) if config.get("nasa_api_key"): app.config["NASA_API_KEY"] = config["nasa_api_key"] @@ -32,26 +41,23 @@ def load_config(app): resolver.resolve_links() app.config.update(resolver.get_config()) + def cache_icons_and_logos(app): - """Cache all icons and logos to local files, mit Fallback auf source.""" + """Cache all icons and logos to local files, with a source fallback.""" for card in app.config["cards"]: icon = card.get("icon", {}) if icon.get("source"): cached = cache_manager.cache_file(icon["source"]) - # Fallback: wenn cache_file None liefert, nutze weiterhin source icon["cache"] = cached or icon["source"] - # Company-Logo company_logo = app.config["company"]["logo"] cached = cache_manager.cache_file(company_logo["source"]) company_logo["cache"] = cached or company_logo["source"] - # Platform Favicon favicon = app.config["platform"]["favicon"] cached = cache_manager.cache_file(favicon["source"]) favicon["cache"] = cached or favicon["source"] - # Platform Logo platform_logo = app.config["platform"]["logo"] cached = cache_manager.cache_file(platform_logo["source"]) platform_logo["cache"] = cached or platform_logo["source"] @@ -64,18 +70,22 @@ app = 
Flask(__name__) load_config(app) cache_icons_and_logos(app) + @app.context_processor def utility_processor(): def include_svg(path): - full_path = os.path.join(current_app.root_path, 'static', path) + full_path = os.path.join(current_app.root_path, "static", path) try: - with open(full_path, 'r', encoding='utf-8') as f: - svg = f.read() - return Markup(svg) - except IOError: - return Markup(f'') + with open(full_path, "r", encoding="utf-8") as handle: + svg = handle.read() + # Trusted local SVG asset shipped with the application package. + return Markup(svg) # nosec B704 + except OSError: + return "" + return dict(include_svg=include_svg) + @app.before_request def reload_config_in_dev(): """Reload config and recache icons before each request in development mode.""" @@ -83,22 +93,22 @@ def reload_config_in_dev(): load_config(app) cache_icons_and_logos(app) -@app.route('/') + +@app.route("/") def index(): """Render the main index page.""" cards = app.config["cards"] lg_classes, md_classes = compute_card_classes(cards) - # fetch NASA APOD URL only if key present apod_bg = None api_key = app.config.get("NASA_API_KEY") if api_key: resp = requests.get( "https://api.nasa.gov/planetary/apod", - params={"api_key": api_key} + params={"api_key": api_key}, + timeout=10, ) if resp.ok: data = resp.json() - # only use if it's an image if data.get("media_type") == "image": apod_bg = data.get("url") @@ -110,8 +120,14 @@ def index(): platform=app.config["platform"], lg_classes=lg_classes, md_classes=md_classes, - apod_bg=apod_bg + apod_bg=apod_bg, ) + if __name__ == "__main__": - app.run(debug=(FLASK_ENV == "development"), host="0.0.0.0", port=FLASK_PORT) + app.run( + debug=(FLASK_ENV == "development"), + host=FLASK_HOST, + port=FLASK_PORT, + use_reloader=False, + ) diff --git a/app/cypress/e2e/navbar_logo_visibility.spec.js b/app/cypress/e2e/navbar_logo_visibility.spec.js index 9e70bc4..867f118 100644 --- a/app/cypress/e2e/navbar_logo_visibility.spec.js +++ 
b/app/cypress/e2e/navbar_logo_visibility.spec.js @@ -15,7 +15,7 @@ describe('Navbar Logo Visibility', () => { it('should become visible (opacity 1) after entering fullscreen', () => { cy.window().then(win => { - win.fullscreen(); + win.enterFullscreen(); }); cy.get('#navbar_logo', { timeout: 4000 }) .should('have.css', 'opacity', '1'); @@ -23,7 +23,7 @@ describe('Navbar Logo Visibility', () => { it('should become invisible again (opacity 0) after exiting fullscreen', () => { cy.window().then(win => { - win.fullscreen(); + win.enterFullscreen(); win.exitFullscreen(); }); cy.get('#navbar_logo', { timeout: 4000 }) diff --git a/app/requirements.txt b/app/requirements.txt deleted file mode 100644 index ff85c09..0000000 --- a/app/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -flask -requests -pyyaml \ No newline at end of file diff --git a/app/utils/__init__.py b/app/utils/__init__.py new file mode 100644 index 0000000..d2a2e5c --- /dev/null +++ b/app/utils/__init__.py @@ -0,0 +1 @@ +"""Utilities used by the Portfolio UI web application.""" diff --git a/app/utils/cache_manager.py b/app/utils/cache_manager.py index e08c65c..c394cc3 100644 --- a/app/utils/cache_manager.py +++ b/app/utils/cache_manager.py @@ -1,7 +1,9 @@ -import os import hashlib -import requests import mimetypes +import os + +import requests + class CacheManager: def __init__(self, cache_dir="static/cache"): @@ -9,8 +11,7 @@ class CacheManager: self._ensure_cache_dir_exists() def _ensure_cache_dir_exists(self): - if not os.path.exists(self.cache_dir): - os.makedirs(self.cache_dir) + os.makedirs(self.cache_dir, exist_ok=True) def clear_cache(self): if os.path.exists(self.cache_dir): @@ -20,8 +21,10 @@ class CacheManager: os.remove(path) def cache_file(self, file_url): - # generate a short hash for filename - hash_suffix = hashlib.blake2s(file_url.encode('utf-8'), digest_size=8).hexdigest() + hash_suffix = hashlib.blake2s( + file_url.encode("utf-8"), + digest_size=8, + ).hexdigest() parts = 
file_url.rstrip("/").split("/") base = parts[-2] if parts[-1] == "download" else parts[-1] @@ -31,7 +34,7 @@ class CacheManager: except requests.RequestException: return None - content_type = resp.headers.get('Content-Type', '') + content_type = resp.headers.get("Content-Type", "") ext = mimetypes.guess_extension(content_type.split(";")[0].strip()) or ".png" filename = f"{base}_{hash_suffix}{ext}" full_path = os.path.join(self.cache_dir, filename) @@ -41,5 +44,4 @@ class CacheManager: for chunk in resp.iter_content(1024): f.write(chunk) - # return path relative to /static/ return f"cache/{filename}" diff --git a/app/utils/compute_card_classes.py b/app/utils/compute_card_classes.py index 1b02f5d..a80396d 100644 --- a/app/utils/compute_card_classes.py +++ b/app/utils/compute_card_classes.py @@ -32,7 +32,7 @@ def compute_card_classes(cards): lg_classes.append("col-lg-6") else: lg_classes.append("col-lg-4") - # md classes: If the number of cards is even or if not the last card, otherwise "col-md-12" + # Use a full-width last card on medium screens only when the total count is odd. 
md_classes = [] for i in range(num_cards): if num_cards % 2 == 0 or i < num_cards - 1: diff --git a/app/utils/configuration_resolver.py b/app/utils/configuration_resolver.py index a3840b7..082b57f 100644 --- a/app/utils/configuration_resolver.py +++ b/app/utils/configuration_resolver.py @@ -13,22 +13,9 @@ class ConfigurationResolver: """ self._recursive_resolve(self.config, self.config) - def __load_children(self,path): - """ - Check if explicitly children should be loaded and not parent - """ - return path.split('.').pop() == "children" - - def _replace_in_dict_by_dict(self, dict_origine, old_key, new_dict): - if old_key in dict_origine: - # Entferne den alten Key - old_value = dict_origine.pop(old_key) - # Füge die neuen Key-Value-Paare hinzu - dict_origine.update(new_dict) - def _replace_in_list_by_list(self, list_origine, old_element, new_elements): index = list_origine.index(old_element) - list_origine[index:index+1] = new_elements + list_origine[index : index + 1] = new_elements def _replace_element_in_list(self, list_origine, old_element, new_element): index = list_origine.index(old_element) @@ -42,27 +29,43 @@ class ConfigurationResolver: for key, value in list(current_config.items()): if key == "children": if value is None or not isinstance(value, list): - raise ValueError(f"Expected 'children' to be a list, but got {type(value).__name__} instead.") + raise ValueError( + "Expected 'children' to be a list, but got " + f"{type(value).__name__} instead." 
+ ) for item in value: if "link" in item: - loaded_link = self._find_entry(root_config, self._mapped_key(item['link']), False) + loaded_link = self._find_entry( + root_config, + self._mapped_key(item["link"]), + False, + ) if isinstance(loaded_link, list): - self._replace_in_list_by_list(value,item,loaded_link) + self._replace_in_list_by_list(value, item, loaded_link) else: - self._replace_element_in_list(value,item,loaded_link) + self._replace_element_in_list(value, item, loaded_link) else: - self._recursive_resolve(value, root_config) + self._recursive_resolve(value, root_config) elif key == "link": try: - loaded = self._find_entry(root_config, self._mapped_key(value), False) + loaded = self._find_entry( + root_config, self._mapped_key(value), False + ) if isinstance(loaded, list) and len(loaded) > 2: - loaded = self._find_entry(root_config, self._mapped_key(value), False) + loaded = self._find_entry( + root_config, self._mapped_key(value), False + ) current_config.clear() current_config.update(loaded) - except Exception as e: + except Exception as e: raise ValueError( f"Error resolving link '{value}': {str(e)}. 
" - f"Current part: {key}, Current config: {current_config}" + (f", Loaded: {loaded}" if 'loaded' in locals() or 'loaded' in globals() else "") + f"Current part: {key}, Current config: {current_config}" + + ( + f", Loaded: {loaded}" + if "loaded" in locals() or "loaded" in globals() + else "" + ) ) else: self._recursive_resolve(value, root_config) @@ -70,69 +73,74 @@ class ConfigurationResolver: for item in current_config: self._recursive_resolve(item, root_config) - def _get_children(self,current): - if isinstance(current, dict) and ("children" in current and current["children"]): + def _get_children(self, current): + if isinstance(current, dict) and ( + "children" in current and current["children"] + ): current = current["children"] return current - def _mapped_key(self,name): + def _mapped_key(self, name): return name.replace(" ", "").lower() - - def _find_by_name(self,current, part): + + def _find_by_name(self, current, part): return next( - (item for item in current if isinstance(item, dict) and self._mapped_key(item.get("name", "")) == part), - None - ) + ( + item + for item in current + if isinstance(item, dict) + and self._mapped_key(item.get("name", "")) == part + ), + None, + ) def _find_entry(self, config, path, children): """ Finds an entry in the configuration by a dot-separated path. Supports both dictionaries and lists with `children` navigation. """ - parts = path.split('.') + parts = path.split(".") current = config for part in parts: if isinstance(current, list): - # If children explicit declared just load children if part != "children": - # Look for a matching name in the list - found = self._find_by_name(current,part) + found = self._find_by_name(current, part) if found: current = found print( - f"Matching entry for '{part}' in list. Path so far: {' > '.join(parts[:parts.index(part)+1])}. " + f"Matching entry for '{part}' in list. Path so far: " + f"{' > '.join(parts[: parts.index(part) + 1])}. 
" f"Current list: {current}" ) else: raise ValueError( - f"No matching entry for '{part}' in list. Path so far: {' > '.join(parts[:parts.index(part)+1])}. " + f"No matching entry for '{part}' in list. Path so far: " + f"{' > '.join(parts[: parts.index(part) + 1])}. " f"Current list: {current}" ) elif isinstance(current, dict): - # Case-insensitive dictionary lookup key = next((k for k in current if self._mapped_key(k) == part), None) - # If no fitting key was found search in the children if key is None: if "children" not in current: raise KeyError( - f"No 'children' found in current dictionary. Path so far: {' > '.join(parts[:parts.index(part)+1])}. " - f"Current dictionary: {current}" - ) - # The following line seems buggy; Why is children loaded allways and not just when children is set? - current = self._find_by_name(current["children"],part) - - if not current: - raise KeyError( - f"Key '{part}' not found in dictionary. Path so far: {' > '.join(parts[:parts.index(part)+1])}. " + "No 'children' found in current dictionary. Path so far: " + f"{' > '.join(parts[: parts.index(part) + 1])}. " f"Current dictionary: {current}" ) - else: + current = self._find_by_name(current["children"], part) + + if not current: + raise KeyError( + f"Key '{part}' not found in dictionary. Path so far: " + f"{' > '.join(parts[: parts.index(part) + 1])}. " + f"Current dictionary: {current}" + ) + else: current = current[key] - else: raise ValueError( f"Invalid path segment '{part}'. Current type: {type(current)}. " - f"Path so far: {' > '.join(parts[:parts.index(part)+1])}" + f"Path so far: {' > '.join(parts[: parts.index(part) + 1])}" ) if children: current = self._get_children(current) diff --git a/main.py b/main.py index 7c716f6..3fde73b 100755 --- a/main.py +++ b/main.py @@ -3,10 +3,11 @@ main.py - Proxy to Makefile targets for managing the Portfolio CMS Docker application. Automatically generates CLI commands based on the Makefile definitions. 
""" + import argparse +import re import subprocess import sys -import re from pathlib import Path MAKEFILE_PATH = Path(__file__).resolve().parent / "Makefile" @@ -20,16 +21,17 @@ def load_targets(makefile_path): """ targets = [] pattern = re.compile(r"^([A-Za-z0-9_\-]+):") - with open(makefile_path, 'r') as f: - lines = f.readlines() + with open(makefile_path, "r", encoding="utf-8") as handle: + lines = handle.readlines() for idx, line in enumerate(lines): m = pattern.match(line) if m: name = m.group(1) - help_text = '' - # look for next non-empty line - if idx + 1 < len(lines) and lines[idx+1].lstrip().startswith('#'): - help_text = lines[idx+1].lstrip('# ').strip() + help_text = "" + if idx + 1 < len(lines): + next_line = lines[idx + 1].lstrip() + if next_line.startswith("#"): + help_text = next_line.lstrip("# ").strip() targets.append((name, help_text)) return targets @@ -54,13 +56,13 @@ def main(): parser.add_argument( "--dry-run", action="store_true", - help="Print the generated Make command without executing it." + help="Print the generated Make command without executing it.", ) subparsers = parser.add_subparsers( title="Available commands", dest="command", - required=True + required=True, ) targets = load_targets(MAKEFILE_PATH) @@ -69,15 +71,9 @@ def main(): sp.set_defaults(target=name) args = parser.parse_args() - - if not args.command: - parser.print_help() - sys.exit(1) - cmd = ["make", args.target] run_command(cmd, dry_run=args.dry_run) if __name__ == "__main__": - from pathlib import Path - main() \ No newline at end of file + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..f40e37a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,44 @@ +[build-system] +requires = ["setuptools>=69"] +build-backend = "setuptools.build_meta" + +[project] +name = "portfolio-ui" +version = "0.0.0" +description = "A lightweight YAML-driven portfolio and landing-page generator." 
+readme = "README.md" +requires-python = ">=3.12" +dependencies = [ + "flask", + "pyyaml", + "requests", +] + +[project.optional-dependencies] +dev = [ + "bandit", + "pip-audit", + "ruff", +] + +[tool.setuptools] +py-modules = ["main"] + +[tool.setuptools.packages.find] +include = ["app", "app.*"] + +[tool.setuptools.package-data] +app = [ + "config.sample.yaml", + "templates/**/*.j2", + "static/css/*.css", + "static/js/*.js", +] + +[tool.ruff] +target-version = "py312" +line-length = 88 +extend-exclude = ["app/static/cache", "build"] + +[tool.ruff.lint] +select = ["E", "F", "I"] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 3e338bf..0000000 --- a/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -python-dotenv \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/integration/test_python_packaging.py b/tests/integration/test_python_packaging.py new file mode 100644 index 0000000..ed748dc --- /dev/null +++ b/tests/integration/test_python_packaging.py @@ -0,0 +1,54 @@ +import tomllib +import unittest +from pathlib import Path + + +class TestPythonPackaging(unittest.TestCase): + def setUp(self) -> None: + self.repo_root = Path(__file__).resolve().parents[2] + self.pyproject_path = self.repo_root / "pyproject.toml" + + with self.pyproject_path.open("rb") as handle: + self.pyproject = tomllib.load(handle) + + def test_pyproject_defines_build_system_and_runtime_dependencies(self): + build_system = self.pyproject["build-system"] + project = self.pyproject["project"] + + self.assertEqual(build_system["build-backend"], "setuptools.build_meta") + self.assertIn("setuptools>=69", build_system["requires"]) + 
self.assertGreaterEqual( + set(project["dependencies"]), + {"flask", "pyyaml", "requests"}, + ) + self.assertEqual(project["requires-python"], ">=3.12") + + def test_pyproject_defines_dev_dependencies_and_package_contents(self): + project = self.pyproject["project"] + setuptools_config = self.pyproject["tool"]["setuptools"] + package_find = setuptools_config["packages"]["find"] + package_data = setuptools_config["package-data"]["app"] + + self.assertGreaterEqual( + set(project["optional-dependencies"]["dev"]), + {"bandit", "pip-audit", "ruff"}, + ) + self.assertEqual(setuptools_config["py-modules"], ["main"]) + self.assertEqual(package_find["include"], ["app", "app.*"]) + self.assertIn("config.sample.yaml", package_data) + self.assertIn("templates/**/*.j2", package_data) + self.assertIn("static/css/*.css", package_data) + self.assertIn("static/js/*.js", package_data) + + def test_legacy_requirements_files_are_removed(self): + self.assertFalse((self.repo_root / "requirements.txt").exists()) + self.assertFalse((self.repo_root / "requirements-dev.txt").exists()) + self.assertFalse((self.repo_root / "app" / "requirements.txt").exists()) + + def test_package_init_files_exist(self): + self.assertTrue((self.repo_root / "app" / "__init__.py").is_file()) + self.assertTrue((self.repo_root / "app" / "utils" / "__init__.py").is_file()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration/test_yaml_syntax.py b/tests/integration/test_yaml_syntax.py new file mode 100644 index 0000000..245e1a1 --- /dev/null +++ b/tests/integration/test_yaml_syntax.py @@ -0,0 +1,43 @@ +import unittest +from pathlib import Path + +import yaml + +SKIP_DIR_NAMES = {".git", ".ruff_cache", "__pycache__", "node_modules"} +SKIP_FILES = {"app/config.yaml"} +YAML_SUFFIXES = {".yml", ".yaml"} + + +class TestYamlSyntax(unittest.TestCase): + def test_all_repository_yaml_files_are_valid(self): + repo_root = Path(__file__).resolve().parents[2] + invalid_files = [] + + for path in 
repo_root.rglob("*"): + if not path.is_file() or path.suffix not in YAML_SUFFIXES: + continue + + relative_path = path.relative_to(repo_root).as_posix() + if relative_path in SKIP_FILES: + continue + + if any(part in SKIP_DIR_NAMES for part in path.parts): + continue + + try: + with path.open("r", encoding="utf-8") as handle: + yaml.safe_load(handle) + except yaml.YAMLError as error: + invalid_files.append((relative_path, str(error).splitlines()[0])) + except Exception as error: + invalid_files.append((relative_path, f"Unexpected error: {error}")) + + self.assertFalse( + invalid_files, + "Found invalid YAML files:\n" + + "\n".join(f"- {path}: {error}" for path, error in invalid_files), + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/lint/__init__.py b/tests/lint/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/lint/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/lint/test_all_test_files_have_tests.py b/tests/lint/test_all_test_files_have_tests.py new file mode 100644 index 0000000..c407a7c --- /dev/null +++ b/tests/lint/test_all_test_files_have_tests.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +import ast +import unittest +from pathlib import Path + + +class TestTestFilesContainUnittestTests(unittest.TestCase): + def setUp(self) -> None: + self.repo_root = Path(__file__).resolve().parents[2] + self.tests_dir = self.repo_root / "tests" + self.assertTrue( + self.tests_dir.is_dir(), + f"'tests' directory not found at: {self.tests_dir}", + ) + + def _iter_test_files(self) -> list[Path]: + return sorted(self.tests_dir.rglob("test_*.py")) + + def _file_contains_runnable_unittest_test(self, path: Path) -> bool: + source = path.read_text(encoding="utf-8") + + try: + tree = ast.parse(source, filename=str(path)) + except SyntaxError as error: + raise AssertionError(f"SyntaxError in {path}: {error}") from error + + testcase_aliases = {"TestCase"} + unittest_aliases = {"unittest"} + + for node in tree.body: + if 
isinstance(node, ast.Import): + for import_name in node.names: + if import_name.name == "unittest": + unittest_aliases.add(import_name.asname or "unittest") + elif isinstance(node, ast.ImportFrom) and node.module == "unittest": + for import_name in node.names: + if import_name.name == "TestCase": + testcase_aliases.add(import_name.asname or "TestCase") + + def is_testcase_base(base: ast.expr) -> bool: + if isinstance(base, ast.Name) and base.id in testcase_aliases: + return True + + if isinstance(base, ast.Attribute) and base.attr == "TestCase": + return ( + isinstance(base.value, ast.Name) + and base.value.id in unittest_aliases + ) + + return False + + for node in tree.body: + if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)) and ( + node.name.startswith("test_") + ): + return True + + for node in tree.body: + if not isinstance(node, ast.ClassDef): + continue + + if not any(is_testcase_base(base) for base in node.bases): + continue + + for item in node.body: + if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)) and ( + item.name.startswith("test_") + ): + return True + + return False + + def test_all_test_py_files_contain_runnable_tests(self) -> None: + test_files = self._iter_test_files() + self.assertTrue(test_files, "No test_*.py files found under tests/") + + offenders = [] + for path in test_files: + if not self._file_contains_runnable_unittest_test(path): + offenders.append(path.relative_to(self.repo_root).as_posix()) + + self.assertFalse( + offenders, + "These test_*.py files do not define any unittest-runnable tests:\n" + + "\n".join(f"- {path}" for path in offenders), + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/lint/test_test_file_naming.py b/tests/lint/test_test_file_naming.py new file mode 100644 index 0000000..b642110 --- /dev/null +++ b/tests/lint/test_test_file_naming.py @@ -0,0 +1,25 @@ +import unittest +from pathlib import Path + + +class TestTestFileNaming(unittest.TestCase): + def 
test_all_python_files_use_test_prefix(self): + tests_root = Path(__file__).resolve().parents[1] + invalid_files = [] + + for path in tests_root.rglob("*.py"): + if path.name == "__init__.py": + continue + + if not path.name.startswith("test_"): + invalid_files.append(path.relative_to(tests_root).as_posix()) + + self.assertFalse( + invalid_files, + "The following Python files do not start with 'test_':\n" + + "\n".join(f"- {path}" for path in invalid_files), + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/security/__init__.py b/tests/security/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/security/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/security/test_config_hygiene.py b/tests/security/test_config_hygiene.py new file mode 100644 index 0000000..2a3df43 --- /dev/null +++ b/tests/security/test_config_hygiene.py @@ -0,0 +1,57 @@ +import subprocess +import unittest +from pathlib import Path + +import yaml + + +class TestConfigHygiene(unittest.TestCase): + def setUp(self) -> None: + self.repo_root = Path(__file__).resolve().parents[2] + self.sample_config_path = self.repo_root / "app" / "config.sample.yaml" + + def _is_tracked(self, path: str) -> bool: + result = subprocess.run( + ["git", "ls-files", "--error-unmatch", path], + cwd=self.repo_root, + check=False, + capture_output=True, + text=True, + ) + return result.returncode == 0 + + def _find_values_for_key(self, data, key_name: str): + if isinstance(data, dict): + for key, value in data.items(): + if key == key_name: + yield value + yield from self._find_values_for_key(value, key_name) + elif isinstance(data, list): + for item in data: + yield from self._find_values_for_key(item, key_name) + + def test_runtime_only_files_are_ignored_and_untracked(self): + gitignore_lines = ( + (self.repo_root / ".gitignore").read_text(encoding="utf-8").splitlines() + ) + + self.assertIn("app/config.yaml", gitignore_lines) + self.assertIn(".env", gitignore_lines) + 
self.assertFalse(self._is_tracked("app/config.yaml")) + self.assertFalse(self._is_tracked(".env")) + + def test_sample_config_keeps_the_nasa_api_key_placeholder(self): + with self.sample_config_path.open("r", encoding="utf-8") as handle: + sample_config = yaml.safe_load(handle) + + nasa_api_keys = list(self._find_values_for_key(sample_config, "nasa_api_key")) + self.assertEqual( + nasa_api_keys, + ["YOUR_REAL_KEY_HERE"], + "config.sample.yaml should only contain the documented NASA API key " + "placeholder.", + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/security/test_sample_config_urls.py b/tests/security/test_sample_config_urls.py new file mode 100644 index 0000000..718ff11 --- /dev/null +++ b/tests/security/test_sample_config_urls.py @@ -0,0 +1,43 @@ +import unittest +from pathlib import Path + +import yaml + +ALLOWED_URL_PREFIXES = ("https://", "mailto:", "tel:") +URL_KEYS = {"url", "imprint", "imprint_url"} + + +class TestSampleConfigUrls(unittest.TestCase): + def setUp(self) -> None: + repo_root = Path(__file__).resolve().parents[2] + sample_config_path = repo_root / "app" / "config.sample.yaml" + with sample_config_path.open("r", encoding="utf-8") as handle: + self.sample_config = yaml.safe_load(handle) + + def _iter_urls(self, data, path="root"): + if isinstance(data, dict): + for key, value in data.items(): + next_path = f"{path}.{key}" + if key in URL_KEYS and isinstance(value, str): + yield next_path, value + yield from self._iter_urls(value, next_path) + elif isinstance(data, list): + for index, item in enumerate(data): + yield from self._iter_urls(item, f"{path}[{index}]") + + def test_sample_config_urls_use_safe_schemes(self): + invalid_urls = [ + f"{path} -> {url}" + for path, url in self._iter_urls(self.sample_config) + if not url.startswith(ALLOWED_URL_PREFIXES) + ] + + self.assertFalse( + invalid_urls, + "The sample config contains URLs with unsupported schemes:\n" + + "\n".join(f"- {entry}" for entry in invalid_urls), + 
) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..4ff78ac --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1 @@ +"""Unit test package for Portfolio UI.""" diff --git a/tests/unit/test_cache_manager.py b/tests/unit/test_cache_manager.py new file mode 100644 index 0000000..bf5f9e0 --- /dev/null +++ b/tests/unit/test_cache_manager.py @@ -0,0 +1,72 @@ +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import Mock, patch + +import requests + +from app.utils.cache_manager import CacheManager + + +class TestCacheManager(unittest.TestCase): + def test_init_creates_cache_directory(self): + with TemporaryDirectory() as temp_dir: + cache_dir = Path(temp_dir) / "cache" + + self.assertFalse(cache_dir.exists()) + + CacheManager(str(cache_dir)) + + self.assertTrue(cache_dir.is_dir()) + + def test_clear_cache_removes_files_but_keeps_subdirectories(self): + with TemporaryDirectory() as temp_dir: + cache_dir = Path(temp_dir) / "cache" + nested_dir = cache_dir / "nested" + nested_dir.mkdir(parents=True) + file_path = cache_dir / "icon.png" + file_path.write_bytes(b"icon") + + manager = CacheManager(str(cache_dir)) + manager.clear_cache() + + self.assertFalse(file_path.exists()) + self.assertTrue(nested_dir.is_dir()) + + @patch("app.utils.cache_manager.requests.get") + def test_cache_file_downloads_and_stores_response(self, mock_get): + with TemporaryDirectory() as temp_dir: + manager = CacheManager(str(Path(temp_dir) / "cache")) + response = Mock() + response.headers = {"Content-Type": "image/svg+xml; charset=utf-8"} + response.iter_content.return_value = [b"ok"] + response.raise_for_status.return_value = None + mock_get.return_value = response + + cached_path = manager.cache_file("https://example.com/logo/download") + + self.assertIsNotNone(cached_path) + self.assertTrue(cached_path.startswith("cache/logo_")) + 
self.assertTrue(cached_path.endswith(".svg")) + + stored_file = Path(manager.cache_dir) / Path(cached_path).name + self.assertEqual(stored_file.read_bytes(), b"ok") + mock_get.assert_called_once_with( + "https://example.com/logo/download", + stream=True, + timeout=5, + ) + + @patch("app.utils.cache_manager.requests.get") + def test_cache_file_returns_none_when_request_fails(self, mock_get): + with TemporaryDirectory() as temp_dir: + manager = CacheManager(str(Path(temp_dir) / "cache")) + mock_get.side_effect = requests.RequestException("network") + + cached_path = manager.cache_file("https://example.com/icon.png") + + self.assertIsNone(cached_path) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_check_hadolint_sarif.py b/tests/unit/test_check_hadolint_sarif.py new file mode 100644 index 0000000..85906d4 --- /dev/null +++ b/tests/unit/test_check_hadolint_sarif.py @@ -0,0 +1,49 @@ +import json +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory + +from utils import check_hadolint_sarif + + +class TestCheckHadolintSarif(unittest.TestCase): + def test_main_returns_zero_for_clean_sarif(self): + sarif_payload = { + "runs": [ + { + "results": [], + } + ] + } + + with TemporaryDirectory() as temp_dir: + sarif_path = Path(temp_dir) / "clean.sarif" + sarif_path.write_text(json.dumps(sarif_payload), encoding="utf-8") + + exit_code = check_hadolint_sarif.main([str(sarif_path)]) + + self.assertEqual(exit_code, 0) + + def test_main_returns_one_for_warnings_or_errors(self): + sarif_payload = { + "runs": [ + { + "results": [ + {"level": "warning"}, + {"level": "error"}, + ], + } + ] + } + + with TemporaryDirectory() as temp_dir: + sarif_path = Path(temp_dir) / "warnings.sarif" + sarif_path.write_text(json.dumps(sarif_payload), encoding="utf-8") + + exit_code = check_hadolint_sarif.main([str(sarif_path)]) + + self.assertEqual(exit_code, 1) + + +if __name__ == "__main__": + unittest.main() diff --git 
a/tests/unit/test_compute_card_classes.py b/tests/unit/test_compute_card_classes.py new file mode 100644 index 0000000..a683957 --- /dev/null +++ b/tests/unit/test_compute_card_classes.py @@ -0,0 +1,39 @@ +import unittest + +from app.utils.compute_card_classes import compute_card_classes + + +class TestComputeCardClasses(unittest.TestCase): + def test_single_card_uses_full_width_classes(self): + lg_classes, md_classes = compute_card_classes([{"title": "One"}]) + + self.assertEqual(lg_classes, ["col-lg-12"]) + self.assertEqual(md_classes, ["col-md-12"]) + + def test_two_cards_split_evenly(self): + lg_classes, md_classes = compute_card_classes([{}, {}]) + + self.assertEqual(lg_classes, ["col-lg-6", "col-lg-6"]) + self.assertEqual(md_classes, ["col-md-6", "col-md-6"]) + + def test_three_cards_use_thirds(self): + lg_classes, md_classes = compute_card_classes([{}, {}, {}]) + + self.assertEqual(lg_classes, ["col-lg-4", "col-lg-4", "col-lg-4"]) + self.assertEqual(md_classes, ["col-md-6", "col-md-6", "col-md-12"]) + + def test_five_cards_use_balanced_large_layout(self): + lg_classes, md_classes = compute_card_classes([{}, {}, {}, {}, {}]) + + self.assertEqual( + lg_classes, + ["col-lg-6", "col-lg-6", "col-lg-4", "col-lg-4", "col-lg-4"], + ) + self.assertEqual( + md_classes, + ["col-md-6", "col-md-6", "col-md-6", "col-md-6", "col-md-12"], + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_configuration_resolver.py b/tests/unit/test_configuration_resolver.py new file mode 100644 index 0000000..bad83e7 --- /dev/null +++ b/tests/unit/test_configuration_resolver.py @@ -0,0 +1,74 @@ +import unittest + +from app.utils.configuration_resolver import ConfigurationResolver + + +class TestConfigurationResolver(unittest.TestCase): + def test_resolve_links_replaces_mapping_link_with_target_object(self): + config = { + "profiles": [ + {"name": "Mastodon", "url": "https://example.com/@user"}, + ], + "featured": {"link": "profiles.mastodon"}, + } + + 
resolver = ConfigurationResolver(config) + resolver.resolve_links() + + self.assertEqual( + resolver.get_config()["featured"], + {"name": "Mastodon", "url": "https://example.com/@user"}, + ) + + def test_resolve_links_expands_children_link_to_list_entries(self): + config = { + "accounts": { + "children": [ + {"name": "Matrix", "url": "https://matrix.example"}, + {"name": "Signal", "url": "https://signal.example"}, + ] + }, + "navigation": { + "children": [ + {"link": "accounts.children"}, + ] + }, + } + + resolver = ConfigurationResolver(config) + resolver.resolve_links() + + self.assertEqual( + resolver.get_config()["navigation"]["children"], + [ + {"name": "Matrix", "url": "https://matrix.example"}, + {"name": "Signal", "url": "https://signal.example"}, + ], + ) + + def test_resolve_links_rejects_non_list_children(self): + config = {"navigation": {"children": {"name": "Invalid"}}} + + resolver = ConfigurationResolver(config) + + with self.assertRaises(ValueError): + resolver.resolve_links() + + def test_find_entry_handles_case_and_space_insensitive_paths(self): + config = { + "Social Networks": { + "children": [ + {"name": "Friendica", "url": "https://friendica.example"}, + ] + } + } + + resolver = ConfigurationResolver(config) + + entry = resolver._find_entry(config, "socialnetworks.friendica", False) + + self.assertEqual(entry["url"], "https://friendica.example") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_export_runtime_requirements.py b/tests/unit/test_export_runtime_requirements.py new file mode 100644 index 0000000..71fa8c8 --- /dev/null +++ b/tests/unit/test_export_runtime_requirements.py @@ -0,0 +1,45 @@ +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + +from utils import export_runtime_requirements + + +class TestExportRuntimeRequirements(unittest.TestCase): + def test_load_runtime_requirements_reads_project_dependencies(self): + pyproject_content = 
""" +[project] +dependencies = [ + "flask", + "requests>=2", +] +""".lstrip() + + with TemporaryDirectory() as temp_dir: + pyproject_path = Path(temp_dir) / "pyproject.toml" + pyproject_path.write_text(pyproject_content, encoding="utf-8") + + requirements = export_runtime_requirements.load_runtime_requirements( + pyproject_path + ) + + self.assertEqual(requirements, ["flask", "requests>=2"]) + + def test_main_prints_requirements_from_selected_pyproject(self): + pyproject_content = """ +[project] +dependencies = [ + "pyyaml", +] +""".lstrip() + + with TemporaryDirectory() as temp_dir: + pyproject_path = Path(temp_dir) / "pyproject.toml" + pyproject_path.write_text(pyproject_content, encoding="utf-8") + + with patch("builtins.print") as mock_print: + exit_code = export_runtime_requirements.main([str(pyproject_path)]) + + self.assertEqual(exit_code, 0) + mock_print.assert_called_once_with("pyyaml") diff --git a/tests/unit/test_main.py b/tests/unit/test_main.py new file mode 100644 index 0000000..70b839d --- /dev/null +++ b/tests/unit/test_main.py @@ -0,0 +1,72 @@ +import subprocess +import unittest +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest.mock import patch + +import main as portfolio_main + + +class TestMainCli(unittest.TestCase): + def test_load_targets_parses_help_comments(self): + makefile_content = """ +.PHONY: foo bar +foo: +\t# Run foo +\t@echo foo + +bar: +\t@echo bar +""".lstrip() + + with TemporaryDirectory() as temp_dir: + makefile_path = Path(temp_dir) / "Makefile" + makefile_path.write_text(makefile_content, encoding="utf-8") + + targets = portfolio_main.load_targets(makefile_path) + + self.assertEqual(targets, [("foo", "Run foo"), ("bar", "")]) + + @patch("main.subprocess.check_call") + def test_run_command_executes_subprocess(self, mock_check_call): + portfolio_main.run_command(["make", "lint"]) + + mock_check_call.assert_called_once_with(["make", "lint"]) + + @patch("main.sys.exit", side_effect=SystemExit(7)) + 
@patch( + "main.subprocess.check_call", + side_effect=subprocess.CalledProcessError(7, ["make", "lint"]), + ) + def test_run_command_exits_with_subprocess_return_code( + self, + _mock_check_call, + mock_sys_exit, + ): + with self.assertRaises(SystemExit) as context: + portfolio_main.run_command(["make", "lint"]) + + self.assertEqual(context.exception.code, 7) + mock_sys_exit.assert_called_once_with(7) + + @patch("main.run_command") + @patch("main.load_targets", return_value=[("lint", "Run lint suite")]) + def test_main_dispatches_selected_target( + self, _mock_load_targets, mock_run_command + ): + with patch("sys.argv", ["main.py", "lint"]): + portfolio_main.main() + + mock_run_command.assert_called_once_with(["make", "lint"], dry_run=False) + + @patch("main.run_command") + @patch("main.load_targets", return_value=[("lint", "Run lint suite")]) + def test_main_passes_dry_run_flag(self, _mock_load_targets, mock_run_command): + with patch("sys.argv", ["main.py", "--dry-run", "lint"]): + portfolio_main.main() + + mock_run_command.assert_called_once_with(["make", "lint"], dry_run=True) + + +if __name__ == "__main__": + unittest.main() diff --git a/utils/check_hadolint_sarif.py b/utils/check_hadolint_sarif.py new file mode 100644 index 0000000..d70907d --- /dev/null +++ b/utils/check_hadolint_sarif.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 +"""Fail when a hadolint SARIF report contains warnings or errors.""" + +from __future__ import annotations + +import json +import sys +from pathlib import Path + + +def main(argv: list[str] | None = None) -> int: + args = argv if argv is not None else sys.argv[1:] + sarif_path = Path(args[0] if args else "hadolint-results.sarif") + + with sarif_path.open("r", encoding="utf-8") as handle: + sarif = json.load(handle) + + results = sarif.get("runs", [{}])[0].get("results", []) + levels = [result.get("level", "") for result in results] + warnings = sum(1 for level in levels if level == "warning") + errors = sum(1 for level in levels if 
level == "error") + + print(f"SARIF results: total={len(results)} warnings={warnings} errors={errors}") + return 1 if warnings + errors > 0 else 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/utils/export_runtime_requirements.py b/utils/export_runtime_requirements.py new file mode 100644 index 0000000..cb305cf --- /dev/null +++ b/utils/export_runtime_requirements.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +"""Print runtime dependencies from pyproject.toml, one per line.""" + +import sys +import tomllib +from pathlib import Path + +DEFAULT_PYPROJECT_PATH = Path(__file__).resolve().parents[1] / "pyproject.toml" + + +def load_runtime_requirements( + pyproject_path: Path = DEFAULT_PYPROJECT_PATH, +) -> list[str]: + with pyproject_path.open("rb") as handle: + pyproject = tomllib.load(handle) + return list(pyproject["project"]["dependencies"]) + + +def main(argv: list[str] | None = None) -> int: + args = argv if argv is not None else sys.argv[1:] + pyproject_path = Path(args[0]) if args else DEFAULT_PYPROJECT_PATH + + for requirement in load_runtime_requirements(pyproject_path): + print(requirement) + + return 0 + + +if __name__ == "__main__": + raise SystemExit(main())