diff --git a/.codecov.yml b/.codecov.yml deleted file mode 100644 index 7204c57e0..000000000 --- a/.codecov.yml +++ /dev/null @@ -1,508 +0,0 @@ -# ============================================================================== -# TUX DISCORD BOT - CODECOV CONFIGURATION -# ============================================================================== -# -# This configuration file defines comprehensive code coverage tracking and -# reporting for the Tux Discord Bot project. It implements tiered coverage -# standards, component-based tracking, and intelligent CI integration. -# -# COVERAGE PHILOSOPHY: -# ------------------- -# - Higher standards for critical components (database, core infrastructure) -# - Moderate standards for features and utilities -# - Lower standards for external API wrappers (limited by external dependencies) -# - Strict requirements for new code (patch coverage) -# -# COMPONENT STRUCTURE: -# -------------------- -# 1. Core Infrastructure - Bot startup, event handling (80% target) -# 2. Database Layer - Data persistence, queries (90% target) -# 3. Bot Commands - User-facing features (75% target) -# 4. Event Handlers - Error handling, stability (80% target) -# 5. Utilities - Helper functions (70% target) -# 6. UI Components - Discord interface elements (70% target) -# 7. CLI Interface - Command-line tools (65% target) -# 8. External Wrappers - Third-party API clients (60% target) -# -# CI INTEGRATION: -# --------------- -# Flags: unit (main tests), database (specific DB tests), integration (e2e tests) -# Reports: Optimized for PR feedback and main branch validation -# Timing: Comments appear after first report for faster feedback -# -# DOCUMENTATION: -# -------------- -# Official Codecov docs: https://docs.codecov.com/docs/codecov-yaml -# Company-specific examples: https://github.com/codecov/example-python -# -# ============================================================================== -# ============================================================================== -# GLOBAL COVERAGE CONFIGURATION -# ============================================================================== -# Purpose: Defines overall coverage behavior, precision, and display preferences -# Impact: Affects all coverage calculations and visual representations -# ============================================================================== -coverage: - # PRECISION AND DISPLAY SETTINGS - # precision: Number of decimal places shown in coverage percentages (0-5) - # round: How to handle rounding (down = conservative, up = optimistic, nearest = balanced) - # range: Color coding thresholds for visual coverage indicators (red...green) - precision: 2 - round: down - range: 70...100 - - # ============================================================================== - # STATUS CHECKS CONFIGURATION - # ============================================================================== - # Purpose: Controls PR status checks and blocking behavior - # Impact: Determines which changes block merging and which are informational - # ============================================================================== - status: - # GLOBAL STATUS RULES - # Applied to all status checks unless overridden by specific configurations - # These settings ensure consistent behavior across all coverage types - default_rules: - # flag_coverage_not_uploaded_behavior: How to handle missing flag data - # exclude = Don't send status if flag data missing (prevents false failures) - flag_coverage_not_uploaded_behavior: exclude - - # 
PROJECT-WIDE COVERAGE REQUIREMENTS - # These checks apply to the entire codebase and determine PR merge eligibility - project: - # OVERALL PROJECT COVERAGE - # Main coverage check that applies to all code changes - default: - target: auto # Compare to base commit (progressive improvement) - threshold: 1% # Allow 1% coverage drop (accounts for refactoring) - informational: true # Don't block PRs while building up test suite - - # ======================================================================== - # COMPONENT-SPECIFIC PROJECT COVERAGE - # ======================================================================== - # Purpose: Different standards for different parts of the codebase - # Rationale: Critical components need higher coverage than utilities - # ======================================================================== - - # CORE BOT INFRASTRUCTURE (Critical - 80% target) - # Files that control bot startup, shutdown, and core event handling - # High standards because failures here affect entire bot operation - core: - target: 80% - threshold: 2% # Stricter threshold for critical code - informational: true # Don't block PRs while building up test suite - flags: # Covered by main unit test suite - - unit - paths: - - tux/bot.py # Main bot class and Discord client setup - - tux/cog_loader.py # Extension loading and management - - tux/help.py # Help system and command documentation - - tux/main.py # Application entry point - - tux/app.py # Application initialization - only_pulls: true # Only check on PRs to avoid noise on main - - # DATABASE LAYER (Highest standards - 90% target) - # All database operations, models, and data persistence logic - # Highest standards due to data integrity and security implications - database: - target: 90% - threshold: 1% # Very strict threshold for data operations - informational: true # Don't block PRs while building up test suite - flags: # Covered by both unit and database-specific tests - - unit - - database - paths: - - tux/database/**/* # All database controllers, models, and utilities - only_pulls: true - - # BOT COMMANDS AND FEATURES (High standards - 75% target) - # User-facing commands and Discord integrations - # High standards because these directly impact user experience - cogs: - target: 75% - threshold: 2% - informational: true # Don't block PRs while building up test suite - flags: - - unit - paths: - - tux/cogs/**/* # All command cogs and Discord slash commands - only_pulls: true - - # UTILITIES AND HELPERS (Moderate standards - 70% target) - # Supporting functions, converters, and helper utilities - # Moderate standards as these are typically simpler, pure functions - utils: - target: 70% - threshold: 3% # More lenient for utility functions - informational: true # Don't block PRs while building up test suite - flags: - - unit - paths: - - tux/utils/**/* # Configuration, helpers, constants, etc. 
- only_pulls: true - - # CLI INTERFACE (Moderate standards - 65% target) - # Command-line tools and development utilities - # Lower standards as CLI tools often have complex argument parsing - cli: - target: 65% - threshold: 3% - informational: true # Don't block PRs while building up test suite - flags: - - unit - paths: - - tux/cli/**/* # Development and management CLI tools - only_pulls: true - - # EVENT AND ERROR HANDLING (High standards - 80% target) - # Error handlers, event processors, and system stability code - # High standards because failures here affect bot reliability - handlers: - target: 80% - threshold: 2% - informational: true # Don't block PRs while building up test suite - flags: - - unit - paths: - - tux/handlers/**/* # Error handlers, event processors, activity handlers - only_pulls: true - - # USER INTERFACE COMPONENTS (Moderate standards - 70% target) - # Discord UI elements like embeds, buttons, modals - # Moderate standards as UI code is often presentation logic - ui: - target: 70% - threshold: 3% - informational: true # Don't block PRs while building up test suite - flags: - - unit - paths: - - tux/ui/**/* # Discord embeds, buttons, modals, views - only_pulls: true - - # EXTERNAL SERVICE WRAPPERS (Lower standards - 60% target) - # Third-party API clients and external service integrations - # Lower standards because testing is limited by external service availability - wrappers: - target: 60% - threshold: 4% # Most lenient threshold due to external dependencies - informational: true # Don't block PRs while building up test suite - flags: - - unit - paths: - - tux/wrappers/**/* # GitHub, XKCD, Godbolt, and other API wrappers - only_pulls: true - - # ======================================================================== - # PATCH COVERAGE FOR NEW CODE - # ======================================================================== - # Purpose: Ensures new code additions meet high quality standards - # Impact: Prevents coverage regression from new development - # ======================================================================== - patch: - # DEFAULT PATCH COVERAGE - # Applies to all new code unless overridden by component-specific rules - default: - target: 85% # High standard for all new code - threshold: 5% # Allow some flexibility for complex implementations - informational: true # Don't block PRs while building up test suite - only_pulls: true # Only apply to PR changes, not existing code - - # CRITICAL COMPONENT PATCH COVERAGE - # Stricter requirements for new code in critical areas - - # DATABASE PATCH COVERAGE (Strictest - 95% target) - # New database code must be extremely well tested - database-patch: - target: 95% - threshold: 2% # Very strict for new database operations - informational: true # Don't block PRs while building up test suite - flags: - - database - paths: - - tux/database/**/* - - # CORE INFRASTRUCTURE PATCH COVERAGE (Very strict - 90% target) - # New core bot functionality must be thoroughly tested - core-patch: - target: 90% - threshold: 3% - informational: true # Don't block PRs while building up test suite - flags: - - unit - paths: - - tux/bot.py - - tux/cog_loader.py - - tux/help.py - - # ERROR HANDLER PATCH COVERAGE (Very strict - 90% target) - # New error handling code must be comprehensive - handlers-patch: - target: 90% - threshold: 3% - informational: true # Don't block PRs while building up test suite - flags: - - unit - paths: - - tux/handlers/**/* -# 
============================================================================== -# PULL REQUEST COMMENT CONFIGURATION -# ============================================================================== -# Purpose: Controls how Codecov comments appear on pull requests -# Impact: Affects developer experience and coverage visibility -# ============================================================================== -comment: - # COMMENT LAYOUT AND CONTENT - # layout: Defines which sections appear in PR comments and their order - # Options: header, diff, flags, components, files, footer, etc. - layout: condensed_header, diff, flags, components, condensed_files, condensed_footer - - # COMMENT BEHAVIOR SETTINGS - behavior: default # Update existing comments instead of creating new ones - require_changes: true # Only comment when coverage actually changes - require_base: false # Don't require base coverage (helps with first PRs) - require_head: true # Require head coverage to generate meaningful comments - hide_project_coverage: false # Show project-wide coverage changes - - # TIMING CONFIGURATION - # after_n_builds: How many coverage reports to wait for before commenting - # 1 = Comment after first report arrives, update with subsequent reports - # This provides faster feedback while still showing complete picture - after_n_builds: 1 - - # TRANSPARENCY FEATURES - # show_carryforward_flags: Display which coverage data is carried over - # Helps developers understand why certain components might show no change - show_carryforward_flags: true -# ============================================================================== -# IGNORE PATTERNS -# ============================================================================== -# Purpose: Excludes files from coverage calculation that shouldn't be tested -# Impact: Focuses coverage metrics on actual application code -# ============================================================================== -ignore: - # TEST AND DEVELOPMENT FILES - # Files that test the application or support development workflows - - tests/**/* # All test files (shouldn't test the tests) - - conftest.py # Pytest configuration and fixtures - - # BUILD AND CACHE ARTIFACTS - # Generated files and build artifacts that change frequently - - '**/__pycache__/**/*' # Python bytecode cache - - .pytest_cache/**/* # Pytest cache directory - - .ruff_cache/**/* # Ruff linter cache - - htmlcov/**/* # Coverage HTML reports - - # PYTHON ENVIRONMENT FILES - # Virtual environment and dependency management files - - .venv/**/* # Virtual environment - - typings/**/* # Type stubs and typing files - - # PROJECT MANAGEMENT FILES - # Documentation, configuration, and project management files - - .archive/**/* # Archived/deprecated code - - docs/**/* # Documentation source files - - scripts/**/* # Utility scripts and automation - - assets/**/* # Static assets (images, sounds, etc.) - - logs/**/* # Application log files - - '*.md' # Markdown documentation files - - # CONFIGURATION FILES - # Project configuration that doesn't contain application logic - - '*.toml' # Poetry, pyproject.toml, etc. 
- - '*.lock' # Dependency lock files - - setup.py # Python package setup files - - # NIX DEVELOPMENT ENVIRONMENT - # Nix package manager and development environment files - - '*.nix' # Nix configuration files - - flake.* # Nix flake files - - shell.nix # Nix development shell - - # EXTERNAL DEPENDENCIES - # Third-party code and generated files we don't control - - prisma/**/* # Prisma ORM generated files -# ============================================================================== -# COMPONENT MANAGEMENT -# ============================================================================== -# Purpose: Organizes codebase into logical components for better tracking -# Impact: Provides component-level coverage insights and organization -# ============================================================================== -component_management: - # DEFAULT COMPONENT RULES - # Applied to all components unless overridden - default_rules: - flag_regexes: # Most components covered by unit tests - - unit - statuses: - - type: project - target: auto # Progressive improvement for all components - threshold: 1% - - # INDIVIDUAL COMPONENT DEFINITIONS - # Each component represents a logical part of the application - individual_components: - # CORE BOT INFRASTRUCTURE COMPONENT - # Central bot functionality and startup logic - - component_id: core - name: Core Bot Infrastructure - paths: - - tux/bot.py # Main Discord bot client - - tux/cog_loader.py # Extension/cog management - - tux/help.py # Help system implementation - - tux/main.py # Application entry point - - tux/app.py # Application setup and configuration - flag_regexes: - - unit - - # DATABASE LAYER COMPONENT - # All data persistence and database operations - - component_id: database - name: Database Layer - paths: - - tux/database/**/* # Controllers, models, client, and utilities - flag_regexes: # Covered by both unit and DB-specific tests - - # BOT COMMANDS AND FEATURES COMPONENT - # User-facing Discord commands and integrations - - unit - - database - - component_id: cogs - name: Bot Commands & Features - paths: - - tux/cogs/**/* # All command cogs organized by category - flag_regexes: - - unit - - # EVENT AND ERROR HANDLING COMPONENT - # System stability, error handling, and event processing - - component_id: handlers - name: Event & Error Handling - paths: - - tux/handlers/**/* # Error handlers, event processors, activity tracking - flag_regexes: - - unit - - # UTILITIES AND HELPERS COMPONENT - # Supporting functions, configuration, and shared utilities - - component_id: utils - name: Utilities & Helpers - paths: - - tux/utils/**/* # Constants, functions, config, logging, etc. - flag_regexes: - - unit - - # USER INTERFACE COMPONENTS - # Discord-specific UI elements and interactions - - component_id: ui - name: User Interface Components - paths: - - tux/ui/**/* # Embeds, buttons, modals, views - flag_regexes: - - unit - - # CLI INTERFACE COMPONENT - # Command-line tools and development utilities - - component_id: cli - name: CLI Interface - paths: - - tux/cli/**/* # Development CLI, Docker management, etc. 
- flag_regexes: - - unit - - # EXTERNAL SERVICE WRAPPERS COMPONENT - # Third-party API clients and external integrations - - component_id: wrappers - name: External Service Wrappers - paths: - - tux/wrappers/**/* # GitHub, XKCD, Godbolt, and other API clients - flag_regexes: - - unit -# ============================================================================== -# FLAG MANAGEMENT -# ============================================================================== -# Purpose: Defines test categories and their coverage behavior -# Impact: Controls how different types of tests contribute to coverage -# ============================================================================== -flag_management: - # DEFAULT FLAG BEHAVIOR - # Applied to all flags unless specifically overridden - default_rules: - carryforward: true # Use previous coverage when new data unavailable - statuses: - - type: project - target: auto # Progressive improvement for all flag types - threshold: 1% - - # INDIVIDUAL FLAG DEFINITIONS - # Each flag represents a different category of tests - individual_flags: - # UNIT TESTS FLAG - # Main test suite covering individual functions and classes - - name: unit - paths: # Covers all application code - - tux/ - carryforward: true - - # DATABASE TESTS FLAG - # Specific tests for database operations and data integrity - - name: database - paths: # Only covers database-related code - - tux/database/**/* - carryforward: true - - # INTEGRATION TESTS FLAG - # End-to-end tests covering full user workflows - - name: integration - paths: # Covers all application code in integrated scenarios - - tux/ - carryforward: true -# ============================================================================== -# ADVANCED CODECOV SETTINGS -# ============================================================================== -# Purpose: Fine-tune Codecov behavior for optimal CI/CD integration -# Impact: Affects upload processing, notification timing, and reliability -# ============================================================================== -codecov: - # UPLOAD AND PROCESSING SETTINGS - max_report_age: off # Disable age checking to prevent CI failures from timestamp issues - require_ci_to_pass: true # Only process coverage if CI pipeline succeeds - disable_default_path_fixes: false # Keep automatic path normalization - - # ARCHIVAL AND DEBUGGING - archive: - uploads: true # Archive uploads for debugging and compliance - - # NOTIFICATION TIMING - notify: - after_n_builds: 1 # Send notifications after first report - wait_for_ci: true # Wait for CI completion before final processing - notify_error: true # Show upload errors in PR comments for transparency -# ============================================================================== -# GITHUB INTEGRATION -# ============================================================================== -# Purpose: Enhanced integration with GitHub's pull request interface -# Impact: Provides inline coverage annotations and improved developer experience -# ============================================================================== -github_checks: - annotations: true # Show line-by-line coverage in PR file diffs -# ============================================================================== -# PARSER CONFIGURATION -# ============================================================================== -# Purpose: Configure how Codecov processes coverage reports -# Impact: Affects accuracy and completeness of coverage data -# 
============================================================================== -parsers: - v1: - include_full_missed_files: true # Include files with 0% coverage in reports -# ============================================================================== -# PATH NORMALIZATION -# ============================================================================== -# Purpose: Normalize file paths for consistent reporting across environments -# Impact: Ensures coverage data is properly matched regardless of build environment -# ============================================================================== -fixes: - # Fix coverage.py path mapping issue where source path includes extra /tux - # Coverage XML shows source="/path/to/repo/tux/tux" but files are at "tux/" - # This maps the coverage paths back to the correct repository structure - - .*/tux/tux/::tux/ # Generic pattern for any environment with double tux path - - tux/tux/::tux/ # Relative path pattern fix diff --git a/.editorconfig b/.editorconfig index 5c903a8c9..3ff7765e7 100644 --- a/.editorconfig +++ b/.editorconfig @@ -34,7 +34,7 @@ indent_size = 2 # Docker files [{Dockerfile,*.dockerfile}] -indent_size = 4 +indent_size = 8 [docker-compose*.yml] indent_size = 2 @@ -65,7 +65,7 @@ indent_size = 4 indent_size = 4 # Lock files (read-only, preserve formatting) -[{poetry.lock,package-lock.json,yarn.lock,Pipfile.lock}] +[{uv.lock,package-lock.json,yarn.lock,Pipfile.lock}] insert_final_newline = false trim_trailing_whitespace = false diff --git a/.gitattributes b/.gitattributes index ddd5ccb28..108204191 100644 --- a/.gitattributes +++ b/.gitattributes @@ -70,10 +70,10 @@ docker-compose*.yml text eol=lf # # Lock Files (binary-like treatment) # -poetry.lock text eol=lf linguist-generated=true package-lock.json text eol=lf linguist-generated=true yarn.lock text eol=lf linguist-generated=true Pipfile.lock text eol=lf linguist-generated=true +uv.lock text eol=lf linguist-generated=true # # Binary Files diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 627059776..fd898cd83 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -13,15 +13,15 @@ Before you start, ensure you have: * [Python](https://www.python.org/) (3.13+ recommended) * If you don't have Python installed, we suggest using something like [mise](https://mise.jdx.dev/) or [pyenv](https://github.com/pyenv/pyenv) to manage your Python installations. -* [Poetry](https://python-poetry.org/docs/) (1.2+ recommended) - * If you don't have Poetry installed, you can use one of the official methods. We recommend using the official installer: +* [Uv](https://docs.astral.sh/uv/) (recommended) + * If you don't have Uv installed, use the official installer and verify: ```bash - # Linux, macOS, Windows (WSL) - curl -sSL https://install.python-poetry.org | python3 - + # Linux/macOS + curl -LsSf https://astral.sh/uv/install.sh | sh - # After installation and ensuring Poetry is in your PATH, you can verify it by running: - poetry --version + # Verify installation + uv --version ``` * A PostgreSQL Database (local or remote) @@ -61,19 +61,19 @@ Follow these steps to set up your local development environment. For more compre git remote -v ``` -2. **Install Dependencies with Poetry** +2. **Install Dependencies with Uv** - Ensure Poetry is installed and configured to use the correct Python version (e.g., 3.13.5). + Ensure Uv is installed and using the correct Python version (project requires 3.13.x). 
```bash - # Create a virtual environment - poetry env use 3.13.5 + # (Optional) Pin the Python version used by uv + uv python pin 3.13.5 - # Install project dependencies and dev tools - poetry install + # Create the virtual environment and install all dependencies + uv sync # Install pre-commit hooks for quality checks - poetry run pre-commit install + uv run pre-commit install ``` 3. **Configure Environment Variables** @@ -94,19 +94,19 @@ Follow these steps to set up your local development environment. For more compre Copy the example settings file. - `cp config/settings.yml.example config/settings.yml` + `cp .env.example .env` - Review `config/settings.yml` and customize it. + Review `.env` and customize it. **Crucially, add your Discord User ID to the `BOT_OWNER` list.** 5. **Initialize Development Database** - Push the Prisma schema to your development database. This also generates the Prisma client. + Run database migrations to set up your development database. ```bash # Use --dev or rely on the default development mode - poetry run tux --dev db push + uv run tux --dev db upgrade ``` ## Development Workflow @@ -164,16 +164,16 @@ Follow these steps to set up your local development environment. For more compre ```bash # Format code using Ruff - poetry run tux dev format + uv run tux dev format # Lint code using Ruff - poetry run tux dev lint-fix + uv run tux dev lint-fix - # Type-check code using basedpyright - poetry run tux dev type-check + # Type-check code using Pyright + uv run tux dev type-check # Run all pre-commit checks (includes formatting, linting, etc.) - poetry run tux dev pre-commit + uv run tux dev pre-commit ``` Fix any issues reported by these tools. diff --git a/.github/actions/action-basedpyright/action.yml b/.github/actions/action-basedpyright/action.yml new file mode 100644 index 000000000..4968925d0 --- /dev/null +++ b/.github/actions/action-basedpyright/action.yml @@ -0,0 +1,56 @@ +--- +name: action-basedpyright +description: Run basedpyright with reviewdog on pull requests to improve code review + experience +inputs: + github_token: + description: GITHUB_TOKEN + default: ${{ github.token }} + workdir: + description: Working directory relative to the root directory. + default: . + ### Flags for reviewdog ### + tool_name: + description: Tool name to use for reviewdog reporter. + default: basedpyright + level: + description: Report level for reviewdog [info,warning,error]. + default: warning + reporter: + description: Reporter of reviewdog command [github-check,github-pr-review,github-pr-check,sarif]. + default: github-pr-review + filter_mode: + description: | + Filtering mode for the reviewdog command [added,diff_context,file,nofilter]. + Default is `added` except that sarif reporter uses `nofilter`. + default: file + fail_level: + description: | + If set to `none`, always use exit code 0 for reviewdog. Otherwise, exit code 1 for reviewdog if it finds at least 1 issue with severity greater than or equal to the given level. + Possible values: [none,any,info,warning,error] + Default is `none`. + default: none + reviewdog_flags: + description: Additional reviewdog flags. + default: '' + ### Flags for basedpyright ### + basedpyright_flags: + description: Additional flags for basedpyright command. 
+ default: --outputjson +runs: + using: composite + steps: + - name: Run basedpyright with reviewdog + shell: bash + working-directory: ${{ inputs.workdir }} + run: | + (uv run basedpyright ${{ inputs.basedpyright_flags }} || true) | \ + reviewdog -f=rdjson \ + -reporter=${{ inputs.reporter }} \ + -level=${{ inputs.level }} \ + -filter-mode=${{ inputs.filter_mode }} \ + -fail-level=${{ inputs.fail_level }} \ + -name=${{ inputs.tool_name }} \ + ${{ inputs.reviewdog_flags }} + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ inputs.github_token }} diff --git a/.github/actions/create-test-env/action.yml b/.github/actions/create-test-env/action.yml index 11302a50f..3be5fb6ef 100644 --- a/.github/actions/create-test-env/action.yml +++ b/.github/actions/create-test-env/action.yml @@ -1,10 +1,8 @@ +--- name: Create Test Environment -description: Create .env file with test configuration for CI/testing purposes +description: Create .env file with test configuration for CI/testing purposes using + pydantic settings inputs: - database-url: - description: Database URL for testing - required: false - default: sqlite:///tmp/test.db bot-token: description: Bot token for testing required: false @@ -17,20 +15,33 @@ runs: using: composite steps: # TEST ENVIRONMENT CONFIGURATION - # Creates isolated test environment with safe defaults + # Creates isolated test environment with safe defaults for pydantic settings - name: Create test environment file shell: bash run: |- - # Create .env file for CI/testing with required values + # Create .env file for CI/testing with pydantic settings format cat > .env << EOF - DEV_DATABASE_URL=${{ inputs.database-url }} - PROD_DATABASE_URL=${{ inputs.database-url }} - DEV_BOT_TOKEN=${{ inputs.bot-token }} - PROD_BOT_TOKEN=${{ inputs.bot-token }} + # Core configuration + DEBUG=True + + # Bot token + BOT_TOKEN=${{ inputs.bot-token }} + + # Database configuration (tests use py-pglite, so these are just defaults) + POSTGRES_HOST=localhost + POSTGRES_PORT=5432 + POSTGRES_DB=tuxdb_test + POSTGRES_USER=tuxuser_test + POSTGRES_PASSWORD=tuxpass_test + + # Bot info defaults + BOT_INFO__BOT_NAME=Tux Test + BOT_INFO__BOT_VERSION=0.0.0-test + BOT_INFO__PREFIX=$ EOF # Add any additional environment variables if provided if [ -n "${{ inputs.additional-vars }}" ]; then echo "${{ inputs.additional-vars }}" >> .env fi - echo "✅ Test environment file created" + echo "✅ Test environment file created with pydantic settings format" diff --git a/.github/actions/setup-nodejs-markdown/action.yml b/.github/actions/setup-nodejs-markdown/action.yml deleted file mode 100644 index d89924f55..000000000 --- a/.github/actions/setup-nodejs-markdown/action.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Setup Node.js for Markdown Linting -description: Set up Node.js with caching and install markdownlint-cli -inputs: - node-version: - description: Node.js version to use - required: false - default: '20' -runs: - using: composite - steps: - # NODE.JS ENVIRONMENT SETUP - # Required for markdownlint-cli installation and execution - - name: Setup Node.js - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 - with: - node-version: ${{ inputs.node-version }} - - # NPM CACHE OPTIMIZATION - # Reduces markdownlint installation time on repeated runs - - name: Cache node modules - uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4 - with: - path: ~/.npm - key: node-${{ runner.os }}-${{ hashFiles('**/package*.json') }} - restore-keys: | - node-${{ runner.os }}- - - # MARKDOWNLINT INSTALLATION - # Global 
installation for CLI usage across all files - - name: Install markdownlint - shell: bash - run: npm install -g markdownlint-cli diff --git a/.github/actions/setup-python/action.yml b/.github/actions/setup-python/action.yml index 9bf0c4d28..e90d5d0fb 100644 --- a/.github/actions/setup-python/action.yml +++ b/.github/actions/setup-python/action.yml @@ -1,66 +1,38 @@ +--- name: Setup Python Environment -description: Set up Python with Poetry, dependencies, and optional Prisma client generation +description: Set up Python with Uv and dependencies inputs: python-version: description: Python version to use required: false default: '3.13' - install-groups: - description: Poetry groups to install (comma-separated) + uv-version: + description: Uv version to install (e.g. 0.8.8) required: false - default: dev,types - cache-suffix: - description: Cache key suffix for differentiation - required: false - default: default - generate-prisma: - description: Whether to generate Prisma client + default: 0.8.8 + enable-cache: + description: Enable uv cache persistence required: false default: 'true' runs: using: composite steps: - # POETRY INSTALLATION - # Uses pipx for isolated Poetry installation without conflicts - - name: Install Poetry - shell: bash - run: pipx install poetry - - # PYTHON ENVIRONMENT SETUP - # Configures Python with integrated Poetry cache support + # PYTHON ENVIRONMENT SETUP (use GitHub's cached Python) - name: Set up Python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python-version }} - cache: poetry - # ADVANCED DEPENDENCY CACHING - # Multi-level caching strategy for maximum cache hit rate - - name: Cache Poetry dependencies - uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4 + # UV INSTALLATION + # Installs uv and optionally enables cache persistence + - name: Install uv + uses: astral-sh/setup-uv@v6 with: - path: | - ~/.cache/pypoetry - ~/.cache/pip - key: poetry-${{ inputs.cache-suffix }}-${{ runner.os }}-${{ hashFiles('poetry.lock') - }} - restore-keys: | - poetry-${{ inputs.cache-suffix }}-${{ runner.os }}- + version: ${{ inputs.uv-version }} + enable-cache: ${{ inputs.enable-cache }} # DEPENDENCY INSTALLATION - # Installs specified Poetry groups with CI-optimized settings + # Install project with locked dependencies - name: Install dependencies shell: bash - run: | - if [[ "${{ inputs.install-groups }}" == "main" ]]; then - poetry install --only=main --no-interaction --no-ansi - else - poetry install --with=${{ inputs.install-groups }} --no-interaction --no-ansi - fi - - # CONDITIONAL PRISMA CLIENT GENERATION - # Generates Prisma database client when needed for database operations - - name: Generate Prisma client - if: ${{ inputs.generate-prisma == 'true' }} - shell: bash - run: poetry run prisma generate + run: uv sync --frozen diff --git a/.github/actions/upload-coverage/action.yml b/.github/actions/upload-coverage/action.yml deleted file mode 100644 index 2cd6a3234..000000000 --- a/.github/actions/upload-coverage/action.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: Upload Coverage to Codecov -description: Upload coverage reports and test results to Codecov -inputs: - coverage-file: - description: Path to the coverage XML file - required: true - junit-file: - description: Path to the JUnit XML file - required: false - default: '' - flags: - description: Codecov flags for categorization - required: true - name: - description: Coverage report name - required: true - 
codecov-token: - description: Codecov token - required: true - slug: - description: Repository slug (owner/repo) - required: false - default: allthingslinux/tux -runs: - using: composite - steps: - # COVERAGE UPLOAD TO CODECOV - # Uploads coverage data with specific flags for categorization - - name: Upload coverage to Codecov - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5 - with: - files: ${{ inputs.coverage-file }} - flags: ${{ inputs.flags }} - name: ${{ inputs.name }} - token: ${{ inputs.codecov-token }} - slug: ${{ inputs.slug }} - fail_ci_if_error: false - verbose: true - disable_search: true - - # TEST RESULTS UPLOAD TO CODECOV - # Uploads test results for analytics (only if junit file provided) - - name: Upload test results to Codecov - if: ${{ inputs.junit-file != '' }} - uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1 - with: - file: ${{ inputs.junit-file }} - flags: ${{ inputs.flags }} - token: ${{ inputs.codecov-token }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1167aaee4..930ebe274 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,361 +1,272 @@ -# ============================================================================== -# TUX DISCORD BOT - CONTINUOUS INTEGRATION WORKFLOW -# ============================================================================== -# -# This workflow handles code quality checks, linting, and validation for the -# Tux Discord bot project. It runs on every push to main and pull requests to -# ensure code quality standards are maintained across the codebase. -# -# WORKFLOW FEATURES: -# ------------------ -# 1. Smart file change detection to skip unnecessary jobs -# 2. Parallel execution for different linting categories -# 3. Comprehensive Python static analysis with basedpyright -# 4. Infrastructure validation (Docker, GitHub Actions, Shell) -# 5. Markdown linting for documentation quality -# 6. 
Efficient caching to reduce execution time
-#
-# SECURITY FEATURES:
-# ------------------
-# - Minimal permissions following principle of least privilege
-# - Read-only operations except for PR annotations
-# - Dependency caching with content-based keys
-# - No sensitive data exposure in logs
-#
-# PERFORMANCE OPTIMIZATIONS:
-# --------------------------
-# - Conditional job execution based on file changes
-# - Parallel job execution across categories
-# - Multi-level caching (Poetry, npm, pip)
-# - Early termination for unchanged file types
-# - Fail-fast disabled to see all issues at once
-#
-# MAINTENANCE NOTES:
-# ------------------
-# - Update action versions regularly for security patches
-# - Monitor cache hit rates and adjust keys if needed
-# - Keep Python version in sync with Dockerfile
-# - Review ignore patterns as project evolves
-#
-# ==============================================================================
+---
 name: CI
-# TRIGGER CONFIGURATION
-# Runs on pushes to main branch, all pull requests, and manual triggers
-# Concurrency control prevents multiple runs on the same branch
 on:
   push:
-    branches:
-      - main
+    branches: [main]
   pull_request:
-    branches:
-      - main
-  # Manual trigger for debugging and testing workflow changes
+    branches: [main]
   workflow_dispatch:
-# CONCURRENCY CONTROL
-# Prevents multiple CI runs on the same branch to save resources
-# Cancels in-progress runs for PRs but allows main branch runs to complete
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: ${{ github.event_name == 'pull_request' }}
+env:
+  PYTHON_VERSION: '3.13'
+  REVIEWDOG_LEVEL: warning
+  REVIEWDOG_REPORTER: github-pr-review
+  REVIEWDOG_FILTER_MODE: file
+  REVIEWDOG_FAIL_LEVEL: none
 jobs:
-  # ============================================================================
-  # PYTHON QUALITY CHECKS - Static Analysis and Type Checking
-  # ============================================================================
-  # Purpose: Ensures Python code quality through static analysis and type checking
-  # Tools: Basedpyright type checker with Poetry dependency management
-  # Optimization: Only runs when Python files or dependencies change
-  # ============================================================================
-  python:
-    name: Python Type Checking
+  changes:
+    name: File Detection
     runs-on: ubuntu-latest
-    permissions:
-      contents: read # Required for checkout
-      pull-requests: write # Required for basedpyright annotations
+    outputs:
+      python: ${{ steps.python_changes.outputs.any_changed }}
+      markdown: ${{ steps.markdown_changes.outputs.any_changed }}
+      shell: ${{ steps.shell_changes.outputs.any_changed }}
+      workflows: ${{ steps.workflow_changes.outputs.any_changed }}
+      docker: ${{ steps.docker_changes.outputs.any_changed }}
+      yaml: ${{ steps.yaml_changes.outputs.any_changed }}
+      # True when any detection step above reported changes
+      any: ${{ steps.python_changes.outputs.any_changed == 'true' || steps.markdown_changes.outputs.any_changed == 'true' || steps.shell_changes.outputs.any_changed == 'true' || steps.workflow_changes.outputs.any_changed == 'true' || steps.docker_changes.outputs.any_changed == 'true' || steps.yaml_changes.outputs.any_changed == 'true' }}
     steps:
-      # REPOSITORY CHECKOUT
-      # Full history needed for accurate change detection
-      - name: Checkout Repository
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
+      - name: Checkout
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0
-
-      # SMART CHANGE DETECTION
-      # Detects Python file changes to skip unnecessary runs
-      # Includes Python source, config files, and dependencies
-      - name: Detect Python changes
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+      - name: Check Python
+        uses: tj-actions/changed-files@v46
         id: python_changes
         with:
           files: |
             **/*.py
             pyproject.toml
-
poetry.lock - - # EARLY TERMINATION FOR UNCHANGED FILES - # Skips expensive Python setup if no relevant files changed - # workflow_dispatch always runs for manual testing - - name: Skip if no Python changes - if: steps.python_changes.outputs.any_changed != 'true' && github.event_name - != 'workflow_dispatch' + uv.lock + files_ignore: | + tests/**/*.py + **/tests/**/*.py + **/migrations/**/*.py + src/tux/database/migrations/**/*.py + - name: Check Markdown + uses: tj-actions/changed-files@v46 + id: markdown_changes + with: + files: '**/*.md' + - name: Check Shell + uses: tj-actions/changed-files@v46 + id: shell_changes + with: + files: | + **/*.sh + **/*.bash + **/*.zsh + scripts/** + - name: Check Workflows + uses: tj-actions/changed-files@v46 + id: workflow_changes + with: + files: .github/workflows/** + - name: Check Docker + uses: tj-actions/changed-files@v46 + id: docker_changes + with: + files: | + Dockerfile + docker-compose*.yml + .dockerignore + - name: Check YAML + uses: tj-actions/changed-files@v46 + id: yaml_changes + with: + files: | + **/*.yml + **/*.yaml + .github/** + - name: Set Outputs run: | - echo "✅ No Python files changed, skipping Python quality checks" - echo "💡 To force run checks, use workflow_dispatch trigger" + { + echo "python=${{ steps.python_changes.outputs.any_changed }}" + echo "markdown=${{ steps.markdown_changes.outputs.any_changed }}" + echo "shell=${{ steps.shell_changes.outputs.any_changed }}" + echo "workflows=${{ steps.workflow_changes.outputs.any_changed }}" + echo "docker=${{ steps.docker_changes.outputs.any_changed }}" + echo "yaml=${{ steps.yaml_changes.outputs.any_changed }}" + } >> "$GITHUB_OUTPUT" - # PYTHON ENVIRONMENT SETUP (COMPOSITE ACTION) - # Uses centralized Python setup for consistency and maintainability - # Configured for CI/linting with dev and types dependency groups - - name: Setup Python Environment - if: steps.python_changes.outputs.any_changed == 'true' || github.event_name - == 'workflow_dispatch' + # Check if any files changed + if [[ "${{ steps.python_changes.outputs.any_changed }}" == "true" ]] || \ + [[ "${{ steps.markdown_changes.outputs.any_changed }}" == "true" ]] || \ + [[ "${{ steps.shell_changes.outputs.any_changed }}" == "true" ]] || \ + [[ "${{ steps.workflow_changes.outputs.any_changed }}" == "true" ]] || \ + [[ "${{ steps.docker_changes.outputs.any_changed }}" == "true" ]] || \ + [[ "${{ steps.yaml_changes.outputs.any_changed }}" == "true" ]]; then + echo "any=true" >> "$GITHUB_OUTPUT" + else + echo "any=false" >> "$GITHUB_OUTPUT" + fi + quality: + name: Python + runs-on: ubuntu-latest + needs: [changes] + if: needs.changes.outputs.python == 'true' || github.event_name == 'workflow_dispatch' + permissions: + contents: read + pull-requests: write + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Setup Python uses: ./.github/actions/setup-python with: - python-version: '3.13' - install-groups: dev,types - cache-suffix: ci - generate-prisma: 'true' - - # STATIC TYPE CHECKING - # basedpyright provides comprehensive type checking for Python - # Annotations appear directly in PR for developer feedback - - name: Run basedpyright type checker - if: steps.python_changes.outputs.any_changed == 'true' || github.event_name - == 'workflow_dispatch' - run: poetry run basedpyright - - # ============================================================================ - # MARKDOWN DOCUMENTATION LINTING - # ============================================================================ - # Purpose: 
Ensures consistent documentation formatting across the project - # Tools: markdownlint-cli with custom rule configuration - # Scope: All .md files excluding dependencies and build artifacts - # ============================================================================ - markdown-lint: - name: Markdown Linting + python-version: ${{ env.PYTHON_VERSION }} + enable-cache: true + - name: Setup Reviewdog + uses: reviewdog/action-setup@d8edfce3dd5e1ec6978745e801f9c50b5ef80252 + with: + reviewdog_version: latest + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Type Check + uses: ./.github/actions/action-basedpyright + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + level: ${{ env.REVIEWDOG_LEVEL }} + reporter: ${{ env.REVIEWDOG_REPORTER }} + filter_mode: ${{ env.REVIEWDOG_FILTER_MODE }} + fail_level: ${{ env.REVIEWDOG_FAIL_LEVEL }} + - name: Run ruff with reviewdog + run: | + echo "Running ruff with reviewdog..." + uv run ruff check --config pyproject.toml --output-format rdjson . | \ + reviewdog -f=rdjson \ + -name=ruff \ + -reporter=${{ env.REVIEWDOG_REPORTER }} \ + -level=${{ env.REVIEWDOG_LEVEL }} \ + -filter-mode=${{ env.REVIEWDOG_FILTER_MODE }} \ + -fail-level=${{ env.REVIEWDOG_FAIL_LEVEL }} + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + markdown: + name: Markdown runs-on: ubuntu-latest + needs: [changes] + if: needs.changes.outputs.markdown == 'true' permissions: contents: read + pull-requests: write steps: - # REPOSITORY CHECKOUT - # Shallow clone sufficient for linting current state - - name: Checkout Repository - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - - # SMART CHANGE DETECTION - # Only runs when documentation files change - # Improves CI performance for code-only changes - - name: Detect Markdown changes - uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5 - id: markdown_changes + - name: Checkout + uses: actions/checkout@v4 + - name: Lint + uses: reviewdog/action-markdownlint@v0.26.2 with: - files: '**/*.md' - - # EARLY TERMINATION FOR UNCHANGED DOCS - # Skips Node.js setup and linting if no docs changed - - name: Skip if no Markdown changes - if: steps.markdown_changes.outputs.any_changed != 'true' - run: | - echo "✅ No Markdown files changed, skipping Markdown linting" - - # NODE.JS ENVIRONMENT SETUP WITH MARKDOWNLINT - # Sets up Node.js and installs markdownlint-cli with caching - - name: Setup Node.js and markdownlint - if: steps.markdown_changes.outputs.any_changed == 'true' - uses: ./.github/actions/setup-nodejs-markdown - - # MARKDOWN LINTING EXECUTION - # Custom rule configuration balances strictness with practicality - # Disabled rules: MD013 (line length), MD033 (HTML), MD041 (first line) - - name: Run Markdown linting - if: steps.markdown_changes.outputs.any_changed == 'true' - run: | - npx markdownlint \ - --disable MD013 MD033 MD041 \ - --ignore node_modules \ - --ignore .venv \ - --ignore .archive \ - "**/*.md" - - # ============================================================================ - # INFRASTRUCTURE VALIDATION - Multi-Category Linting Matrix - # ============================================================================ - # Purpose: Validates infrastructure code (Docker, CI/CD, Shell scripts) - # Strategy: Matrix execution for parallel validation of different file types - # Performance: Only runs on push/dispatch to avoid PR overhead - # ============================================================================ - infrastructure: - name: 
Infrastructure Linting + github_token: ${{ secrets.GITHUB_TOKEN }} + level: ${{ env.REVIEWDOG_LEVEL }} + reporter: ${{ env.REVIEWDOG_REPORTER }} + filter_mode: ${{ env.REVIEWDOG_FILTER_MODE }} + fail_level: ${{ env.REVIEWDOG_FAIL_LEVEL }} + markdownlint_flags: -c .markdownlint.yaml + shell: + name: Shell runs-on: ubuntu-latest + needs: [changes] + if: needs.changes.outputs.shell == 'true' permissions: contents: read - # EXECUTION CONTROL - # Skip for PRs to reduce noise unless explicitly triggered - # Infrastructure changes are typically reviewed separately - if: github.event_name == 'workflow_dispatch' || github.event_name == 'push' - - # MATRIX STRATEGY - # Parallel execution of different infrastructure categories - # fail-fast disabled to see all infrastructure issues at once - strategy: - fail-fast: false - matrix: - include: - # DOCKER VALIDATION - # Validates Dockerfile syntax and Docker Compose configuration - - type: Docker - files: Dockerfile*,docker-compose*.yml - - # GITHUB ACTIONS VALIDATION - # Validates workflow syntax and actionlint rules - - type: GitHub Actions - files: .github/workflows/** - - # SHELL SCRIPT VALIDATION - # Validates shell scripts for syntax and best practices - - type: Shell Scripts - files: '**/*.sh,**/*.bash,scripts/**' + pull-requests: write steps: - # REPOSITORY CHECKOUT - # Shallow clone sufficient for infrastructure validation - - name: Checkout Repository - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - - # SMART CHANGE DETECTION - # Each matrix job only runs if relevant files changed - # Improves efficiency by skipping unchanged categories - - name: Detect ${{ matrix.type }} changes - uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5 - id: infra_changes + - name: Checkout + uses: actions/checkout@v4 + - name: Lint + uses: reviewdog/action-shellcheck@v1.31 with: - files: ${{ matrix.files }} - - # EARLY TERMINATION FOR UNCHANGED CATEGORIES - # Skips expensive validation setup if no files changed - - name: Skip if no ${{ matrix.type }} changes - if: steps.infra_changes.outputs.any_changed != 'true' - run: | - echo "✅ No ${{ matrix.type }} files changed, skipping ${{ matrix.type }} linting" - - # DOCKER COMPOSE ENVIRONMENT SETUP - # Verifies Docker Compose v2 availability on GitHub runners - # Handles both v1 and v2 for compatibility - - name: Set up Docker Compose v2 - if: matrix.type == 'Docker' && steps.infra_changes.outputs.any_changed == - 'true' - run: | - # Docker Compose v2 is pre-installed on GitHub runners - # Just verify it's available and supports the develop configuration - docker compose version - echo "✅ Docker Compose v2 is available" - - # DOCKER COMPOSE VALIDATION ENVIRONMENT - # Creates minimal .env file required for compose config validation - # Contains placeholder values that satisfy syntax requirements - - name: Create test environment for Docker Compose validation - if: matrix.type == 'Docker' && steps.infra_changes.outputs.any_changed == - 'true' - uses: ./.github/actions/create-test-env + github_token: ${{ secrets.GITHUB_TOKEN }} + level: ${{ env.REVIEWDOG_LEVEL }} + reporter: ${{ env.REVIEWDOG_REPORTER }} + filter_mode: ${{ env.REVIEWDOG_FILTER_MODE }} + fail_level: ${{ env.REVIEWDOG_FAIL_LEVEL }} + - name: Format + uses: reviewdog/action-shfmt@v1.0.4 with: - additional-vars: | - PROD_DATABASE_URL=sqlite:///tmp/test.db - PROD_BOT_TOKEN=test_token_for_ci_validation - - # DOCKER VALIDATION EXECUTION - # Runs Hadolint for Dockerfile best practices - # Validates Docker 
Compose syntax with version compatibility - - name: Run Docker linting - if: matrix.type == 'Docker' && steps.infra_changes.outputs.any_changed == - 'true' - run: | - # DOCKERFILE LINTING WITH HADOLINT - # Ignores specific rules that conflict with our multi-stage build - # DL3008: Pin versions in apt (handled by explicit version specs) - # DL3009: Delete apt cache (handled by multi-line RUN optimization) - docker run --rm -i hadolint/hadolint hadolint \ - --ignore DL3008 \ - --ignore DL3009 \ - - < Dockerfile - - # DOCKER COMPOSE SYNTAX VALIDATION - # Supports both v1 and v2 for maximum compatibility - # Uses config --quiet to validate without exposing secrets - if command -v docker compose >/dev/null 2>&1; then - echo "Using Docker Compose v2" - docker compose -f docker-compose.yml config --quiet - docker compose -f docker-compose.dev.yml config --quiet - elif command -v docker-compose >/dev/null 2>&1; then - echo "Using Docker Compose v1" - docker-compose -f docker-compose.yml config --quiet - docker-compose -f docker-compose.dev.yml config --quiet - else - echo "Neither docker compose nor docker-compose found" - exit 1 - fi - - # GITHUB ACTIONS VALIDATION - # Uses actionlint for comprehensive workflow validation - # Checks syntax, job dependencies, and GitHub Actions best practices - - name: Run GitHub Actions linting - if: matrix.type == 'GitHub Actions' && steps.infra_changes.outputs.any_changed - == 'true' - uses: raven-actions/actionlint@3a24062651993d40fed1019b58ac6fbdfbf276cc # v2 + github_token: ${{ secrets.GITHUB_TOKEN }} + level: ${{ env.REVIEWDOG_LEVEL }} + filter_mode: ${{ env.REVIEWDOG_FILTER_MODE }} + shfmt_flags: -i 2 -ci -bn -sr -kp -w -s -p + workflows: + name: Workflows + runs-on: ubuntu-latest + needs: [changes] + if: needs.changes.outputs.workflows == 'true' + permissions: + contents: read + pull-requests: write + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Validate + uses: reviewdog/action-actionlint@v1.66.1 with: - files: .github/workflows/*.yml - - # SHELL SCRIPT VALIDATION - # Uses ShellCheck for comprehensive shell script analysis - # Focuses on scripts directory for project-specific scripts - - name: Run Shell linting - if: matrix.type == 'Shell Scripts' && steps.infra_changes.outputs.any_changed - == 'true' - uses: ludeeus/action-shellcheck@master + github_token: ${{ secrets.GITHUB_TOKEN }} + level: ${{ env.REVIEWDOG_LEVEL }} + reporter: ${{ env.REVIEWDOG_REPORTER }} + filter_mode: ${{ env.REVIEWDOG_FILTER_MODE }} + fail_level: ${{ env.REVIEWDOG_FAIL_LEVEL }} + docker: + name: Docker + runs-on: ubuntu-latest + needs: [changes] + if: needs.changes.outputs.docker == 'true' + permissions: + contents: read + pull-requests: write + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Lint + uses: reviewdog/action-hadolint@v1.50.2 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + level: ${{ env.REVIEWDOG_LEVEL }} + reporter: ${{ env.REVIEWDOG_REPORTER }} + filter_mode: ${{ env.REVIEWDOG_FILTER_MODE }} + fail_level: ${{ env.REVIEWDOG_FAIL_LEVEL }} + hadolint_ignore: DL3008 DL3009 + yaml: + name: YAML + runs-on: ubuntu-latest + needs: [changes] + if: needs.changes.outputs.yaml == 'true' + permissions: + contents: read + pull-requests: write + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Lint + uses: reviewdog/action-yamllint@v1.21.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + level: ${{ env.REVIEWDOG_LEVEL }} + reporter: ${{ env.REVIEWDOG_REPORTER }} + filter_mode: ${{ env.REVIEWDOG_FILTER_MODE }} + 
fail_level: ${{ env.REVIEWDOG_FAIL_LEVEL }}
+  security:
+    name: Security
+    runs-on: ubuntu-latest
+    needs: [changes]
+    if: always()
+    permissions:
+      contents: read
+      pull-requests: write
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Scan
+        uses: reviewdog/action-gitleaks@v1.7
         with:
-          scandir: ./scripts
-# ==============================================================================
-# CI WORKFLOW BEST PRACTICES IMPLEMENTED
-# ==============================================================================
-#
-# 1. PERFORMANCE OPTIMIZATION:
-#    - Smart change detection to skip unnecessary work
-#    - Parallel job execution across categories
-#    - Multi-level caching for dependencies
-#    - Early termination for unchanged files
-#
-# 2. SECURITY & PERMISSIONS:
-#    - Minimal required permissions for each job
-#    - No sensitive data exposure in validation
-#    - Read-only operations where possible
-#    - Secure dependency installation practices
-#
-# 3. MAINTAINABILITY:
-#    - Clear job names and step descriptions
-#    - Consistent error handling and reporting
-#    - Comprehensive documentation for each section
-#    - Version pinning for reproducible builds
-#
-# 4. DEVELOPER EXPERIENCE:
-#    - Clear skip messages explaining why jobs didn't run
-#    - Direct PR annotations for type checking errors
-#    - Fail-fast disabled to see all issues at once
-#    - Manual trigger option for debugging
-#
-# 5. RELIABILITY:
-#    - Robust error handling and fallbacks
-#    - Compatible with both Docker Compose v1 and v2
-#    - Comprehensive validation across file types
-#    - Proper cache invalidation strategies
-#
-# USAGE EXAMPLES:
-# ---------------
-# Manual trigger:
-#   GitHub UI → Actions → CI → Run workflow
-#
-# Force run all checks:
-#   Uses workflow_dispatch trigger to bypass change detection
-#
-# View job results:
-#   Check Actions tab for detailed logs and annotations
-#
-# Troubleshoot cache issues:
-#   Clear cache keys if dependencies get corrupted
-#
-# ==============================================================================
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          level: error
+          reporter: ${{ env.REVIEWDOG_REPORTER }}
+          filter_mode: ${{ env.REVIEWDOG_FILTER_MODE }}
+          fail_level: ${{ env.REVIEWDOG_FAIL_LEVEL }}
+          gitleaks_flags: --verbose
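The reviewdog pipeline in the `quality` job above can be reproduced on a local checkout before opening a PR. This is a minimal sketch, assuming `uv` and the `reviewdog` CLI are installed; `-reporter=local` prints findings to the terminal instead of annotating a pull request:

```bash
# Mirror the CI "quality" job locally (sketch; run from the repository root).
uv sync --frozen
uv run ruff check --config pyproject.toml --output-format rdjson . |
  reviewdog -f=rdjson -name=ruff -reporter=local -filter-mode=nofilter
```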
diff --git a/.github/workflows/cleanup.yml b/.github/workflows/cleanup.yml
new file mode 100644
index 000000000..e535c15a6
--- /dev/null
+++ b/.github/workflows/cleanup.yml
@@ -0,0 +1,165 @@
+---
+name: Registry Cleanup
+on:
+  workflow_dispatch:
+    inputs:
+      cleanup_type:
+        description: Type of cleanup to perform
+        required: true
+        default: standard
+        type: choice
+        options: [standard, aggressive, build-cache-only]
+      keep_versions:
+        description: Number of versions to keep
+        required: false
+        default: '10'
+      dry_run:
+        description: Dry run (don't actually delete)
+        type: boolean
+        default: false
+  schedule:
+    - cron: 0 1 1 * * # Monthly cleanup on the 1st at 1 AM (no inputs, so it runs the standard profile)
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: false
+env:
+  PACKAGE_NAME: tux
+  PACKAGE_TYPE: container
+jobs:
+  cleanup:
+    name: Registry Cleanup
+    runs-on: ubuntu-latest
+    permissions:
+      packages: write
+      contents: read
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Setup Cleanup Parameters
+        id: params
+        run: |
+          case "${{ github.event.inputs.cleanup_type || 'standard' }}" in
+            "standard")
+              KEEP_VERSIONS="${{ github.event.inputs.keep_versions || '15' }}"
+              REMOVE_UNTAGGED="true"
+              CLEAN_BUILD_CACHE="true"
+              ;;
+            "aggressive")
+              KEEP_VERSIONS="${{ github.event.inputs.keep_versions || '5' }}"
+              REMOVE_UNTAGGED="true"
+              CLEAN_BUILD_CACHE="true"
+              ;;
+            "build-cache-only")
+              KEEP_VERSIONS="999"
+              REMOVE_UNTAGGED="false"
+              CLEAN_BUILD_CACHE="true"
+              ;;
+          esac
+          {
+            echo "keep_versions=$KEEP_VERSIONS"
+            echo "remove_untagged=$REMOVE_UNTAGGED"
+            echo "clean_build_cache=$CLEAN_BUILD_CACHE"
+            echo "cleanup_type=${{ github.event.inputs.cleanup_type || 'standard' }}"
+            echo "dry_run=${{ github.event.inputs.dry_run || 'false' }}"
+          } >> "$GITHUB_OUTPUT"
+      - name: Registry Analysis
+        id: analysis
+        run: |
+          {
+            echo "## 🔍 Registry Analysis"
+            echo "**Cleanup Type**: ${{ steps.params.outputs.cleanup_type }}"
+            echo "**Keep Versions**: ${{ steps.params.outputs.keep_versions }}"
+            echo "**Dry Run**: ${{ steps.params.outputs.dry_run }}"
+            echo ""
+          } >> "$GITHUB_STEP_SUMMARY"
+
+          # Get current registry info
+          PACKAGE_INFO=$(gh api user/packages/${{ env.PACKAGE_TYPE }}/${{ env.PACKAGE_NAME }} 2>/dev/null || echo '{"size_in_bytes": 0, "version_count": 0}')
+          SIZE_BYTES=$(echo "$PACKAGE_INFO" | jq -r '.size_in_bytes // 0')
+          VERSION_COUNT=$(echo "$PACKAGE_INFO" | jq -r '.version_count // 0')
+          SIZE_GB=$(echo "scale=2; $SIZE_BYTES / 1024 / 1024 / 1024" | bc -l 2>/dev/null || echo "0")
+          {
+            echo "**Current Registry Size**: ${SIZE_GB}GB"
+            echo "**Current Version Count**: $VERSION_COUNT"
+            echo ""
+            echo "**Current Versions:**"
+            echo '```'
+          } >> "$GITHUB_STEP_SUMMARY"
+
+          # List current versions
+          gh api user/packages/${{ env.PACKAGE_TYPE }}/${{ env.PACKAGE_NAME }}/versions | \
+            jq -r '.[] | "\(.name) - \(.created_at) - \(.size_in_bytes) bytes"' | \
+            head -20 >> "$GITHUB_STEP_SUMMARY" 2>/dev/null || echo "Could not list versions" >> "$GITHUB_STEP_SUMMARY"
+          {
+            echo '```'
+            echo ""
+          } >> "$GITHUB_STEP_SUMMARY"
+          {
+            echo "size_gb=$SIZE_GB"
+            echo "version_count=$VERSION_COUNT"
+          } >> "$GITHUB_OUTPUT"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Clean Old Versions
+        if: steps.params.outputs.cleanup_type != 'build-cache-only'
+        run: |
+          {
+          echo "## 🧹 Cleaning Old Versions"
+          if [ "${{ steps.params.outputs.dry_run }}" = "true" ]; then
+            echo "**DRY RUN**: Would keep ${{ steps.params.outputs.keep_versions }} versions"
+            echo "**DRY RUN**: Would remove untagged: ${{ steps.params.outputs.remove_untagged }}"
+          else
+            echo "Cleaning old versions..."
+            KEEP="${{ steps.params.outputs.keep_versions }}"
+            UNTAGGED_ONLY="${{ steps.params.outputs.remove_untagged }}"
+            # Assumes the API's default newest-first ordering: the jq slice
+            # keeps the first $KEEP entries and selects the rest for deletion
+            gh api "user/packages/${{ env.PACKAGE_TYPE }}/${{ env.PACKAGE_NAME }}/versions?per_page=100" | \
+              jq -r --argjson keep "$KEEP" --arg untagged_only "$UNTAGGED_ONLY" \
+                '.[$keep:][] | select($untagged_only == "false" or ((.metadata.container.tags // []) | length == 0)) | .id' | \
+              xargs -r -I {} gh api -X DELETE "user/packages/${{ env.PACKAGE_TYPE }}/${{ env.PACKAGE_NAME }}/versions/{}" || \
+              echo "Cleanup completed or no versions to clean"
+          fi
+          echo ""
+          } >> "$GITHUB_STEP_SUMMARY"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Clean Build Cache
+        if: steps.params.outputs.clean_build_cache == 'true'
+        run: |
+          echo "## 🗑️ Cleaning Build Cache" >> "$GITHUB_STEP_SUMMARY"
+
+          # Find build cache images older than 7 days; match on the tag list,
+          # since a GHCR version "name" is a digest and never says "buildcache"
+          CUTOFF_DATE=$(date -d '7 days ago' -Iseconds)
+          BUILD_CACHE_IMAGES=$(gh api user/packages/${{ env.PACKAGE_TYPE }}/${{ env.PACKAGE_NAME }}/versions | \
+            jq -r --arg cutoff "$CUTOFF_DATE" '.[] | select((.metadata.container.tags // []) | any(contains("buildcache"))) | select(.created_at < $cutoff) | .id' 2>/dev/null || echo "")
+          if [ -n "$BUILD_CACHE_IMAGES" ]; then
+            {
+              echo "**Found build cache images to clean:**"
+              echo '```'
+              echo "$BUILD_CACHE_IMAGES"
+              echo '```'
+            } >> "$GITHUB_STEP_SUMMARY"
+            if [ "${{ steps.params.outputs.dry_run }}" = "true" ]; then
+              echo "**DRY RUN**: Would delete these build cache images" >> "$GITHUB_STEP_SUMMARY"
+            else
+              echo "$BUILD_CACHE_IMAGES" | xargs -r -I {} gh api -X DELETE user/packages/${{ env.PACKAGE_TYPE }}/${{ env.PACKAGE_NAME }}/versions/{} || \
+                echo "Build cache cleanup completed" >> "$GITHUB_STEP_SUMMARY"
+            fi
+          else
+            echo "**No build cache images to clean**" >> "$GITHUB_STEP_SUMMARY"
+          fi
+          echo "" >> "$GITHUB_STEP_SUMMARY"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Cleanup Summary
+        run: |-
+          {
+            echo "## ✅ Cleanup Summary"
+            echo "**Cleanup Type**: ${{ steps.params.outputs.cleanup_type }}"
+            echo "**Versions Kept**: ${{ steps.params.outputs.keep_versions }}"
+            echo "**Untagged Removed**: ${{ steps.params.outputs.remove_untagged }}"
+            echo "**Build Cache Cleaned**: ${{ steps.params.outputs.clean_build_cache }}"
+            echo "**Dry Run**: ${{ steps.params.outputs.dry_run }}"
+            echo ""
+            if [ "${{ steps.params.outputs.dry_run }}" = "false" ]; then
+              echo "**Status**: ✅ Cleanup completed successfully"
+            else
+              echo "**Status**: 🔍 Dry run completed - no changes made"
+            fi
+          } >> "$GITHUB_STEP_SUMMARY"
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index f089a3308..a5bb7044a 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -1,17 +1,15 @@
+---
 name: Deploy
 on:
   release:
-    types:
-      - published
+    types: [published]
   workflow_dispatch:
     inputs:
       environment:
         description: Environment to deploy to
         required: true
         type: choice
-        options:
-          - staging
-          - production
+        options: [staging, production]
         default: staging
 concurrency:
   group: deploy-${{ github.event.inputs.environment || 'production' }}
@@ -29,8 +27,8 @@ jobs:
       deployments: write
     steps:
       - name: Checkout
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
-      - name: Get Docker image
+        uses: actions/checkout@v4
+      - name: Get Image
         id: image
         run: |
           if [ "${{ github.event_name }}" = "release" ]; then
@@ -43,7 +41,7 @@ jobs:
           IMAGE="ghcr.io/${{ github.repository }}:${IMAGE_TAG}"
           echo "image=$IMAGE" >> "$GITHUB_OUTPUT"
           echo "Deploying image: $IMAGE"
-      - name: Deploy to environment
+      - name: Deploy
         id: deploy
         run: |
          ENV="${{ 
github.event.inputs.environment || 'production' }}" diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 75369d9ef..8e6ee1ab3 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -1,450 +1,221 @@ -# ============================================================================== -# TUX DISCORD BOT - DOCKER BUILD & DEPLOYMENT WORKFLOW -# ============================================================================== -# -# This workflow handles Docker image building, testing, and deployment for the -# Tux Discord bot. It provides secure, multi-platform container builds with -# comprehensive security scanning and optimized caching strategies for -# production deployment and container registry management. -# -# WORKFLOW FEATURES: -# ------------------ -# 1. Multi-platform builds (AMD64, ARM64) for broad compatibility -# 2. Comprehensive security scanning with Trivy vulnerability detection -# 3. Advanced build caching for faster subsequent builds -# 4. Production image validation and smoke testing -# 5. Automated registry cleanup to prevent storage bloat -# 6. Secure container registry authentication and management -# -# BUILD STRATEGY: -# --------------- -# - PR Validation: Quick syntax/build validation without push -# - Tag Builds: Full multi-platform builds with security scanning -# - Main Branch: Single-platform builds for development -# - Scheduled: Monthly cleanup of unused images and cache -# -# SECURITY FEATURES: -# ------------------ -# - SLSA provenance and SBOM generation for releases -# - Trivy vulnerability scanning with SARIF upload -# - Secure registry authentication via GitHub tokens -# - Minimal image permissions and isolation -# - Container content verification through smoke tests -# -# PERFORMANCE OPTIMIZATIONS: -# -------------------------- -# - GitHub Actions cache for build layers -# - Multi-stage Dockerfile optimization -# - Platform-conditional builds (ARM64 only for releases) -# - Build timeout controls to prevent hanging -# - Efficient layer caching with cache-from/cache-to -# -# ============================================================================== +--- name: Docker -# TRIGGER CONFIGURATION -# Comprehensive triggering for different build scenarios -# Includes pull request validation, tag-based releases, and maintenance on: - # VERSION RELEASES - # Triggered by semantic version tags (v1.0.0, v1.2.3-beta, etc.) 
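+  # Tag filters are glob patterns: v* also matches tags like "version-x";
+  # anchor with v[0-9]* if stricter matching is ever needed.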
push: - tags: - - v* - - # PULL REQUEST VALIDATION - # Validates Docker builds without pushing to registry + tags: [v*] pull_request: - branches: - - main - - # MANUAL TRIGGER - # Allows manual builds for testing and debugging + branches: [main] workflow_dispatch: - - # SCHEDULED MAINTENANCE - # Monthly cleanup spread across different days to avoid resource conflicts schedule: - - cron: 0 2 15 * * # Monthly cleanup on the 15th (spread from maintenance.yml) -# CONCURRENCY MANAGEMENT -# Prevents resource conflicts and manages parallel builds efficiently + - cron: 0 2 15 * * concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: ${{ github.event_name == 'pull_request' }} -# GLOBAL ENVIRONMENT VARIABLES -# Centralized configuration for registry settings and build options env: - REGISTRY: ghcr.io # GitHub Container Registry - IMAGE_NAME: ${{ github.repository }} # Repository-based image name - DOCKER_BUILD_SUMMARY: true # Enable build summaries - DOCKER_BUILD_CHECKS_ANNOTATIONS: true # Enable build annotations + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + DOCKER_BUILD_SUMMARY: true + DOCKER_BUILD_CHECKS_ANNOTATIONS: true + PYTHON_VERSION: '3.13' jobs: - # ============================================================================ - # DOCKER BUILD VALIDATION - Pull Request Verification - # ============================================================================ - # Purpose: Validates Docker builds on pull requests without registry push - # Strategy: Fast validation with caching to ensure buildability - # Scope: Syntax validation, dependency resolution, build completion - # Performance: Optimized for quick feedback in PR reviews - # ============================================================================ + changes: + name: File Detection + runs-on: ubuntu-latest + outputs: + docker: ${{ steps.docker_changes.outputs.any_changed }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Check Docker + uses: tj-actions/changed-files@v46 + id: docker_changes + with: + files: | + Dockerfile + docker-compose*.yml + .dockerignore + docker/** validate: - name: Validate Build - # EXECUTION CONDITIONS - # Only runs on pull requests to validate changes without deployment - if: github.event_name == 'pull_request' + name: Validate + needs: [changes] + if: (needs.changes.outputs.docker == 'true' || github.event_name == 'workflow_dispatch') + && github.event_name == 'pull_request' runs-on: ubuntu-latest permissions: - contents: read # Required for repository checkout + contents: read + pull-requests: write steps: - # DOCKER BUILDX SETUP - # Advanced Docker builder with enhanced caching and multi-platform support - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3 - - # VERSION INFORMATION PREPARATION - # Generates PR-specific version information for build context - - name: Prepare version info - id: version + - name: Setup Buildx + uses: docker/setup-buildx-action@v3 + - name: Extract metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: tux + tags: | + type=raw,value=pr-${{ github.event.number }} + labels: | + org.opencontainers.image.title=Tux + org.opencontainers.image.description=Tux - The all in one discord bot for the All Things Linux Community + org.opencontainers.image.source=https://github.com/allthingslinux/tux + org.opencontainers.image.licenses=GPL-3.0 + org.opencontainers.image.authors=All Things Linux + 
org.opencontainers.image.vendor=All Things Linux
+            org.opencontainers.image.revision=${{ github.sha }}
+            org.opencontainers.image.documentation=https://github.com/allthingslinux/tux/blob/main/README.md
+      - name: Generate PR Version
+        id: pr_version
         run: |
-          # For PR validation, use PR number and short SHA for version
-          VERSION="pr-${{ github.event.number }}-$(echo "${{ github.sha }}" | cut -c1-7)"
-          {
-            echo "version=$VERSION"
-            echo "git_sha=${{ github.sha }}"
-            echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
-          } >> "$GITHUB_OUTPUT"
-
-      # VALIDATION BUILD EXECUTION
-      # Builds production image without pushing to validate build process
-      # Uses GitHub Actions cache for improved performance
-      - name: Build for validation (Git context)
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+          # Generate git describe format for PR builds to match VERSIONING.md expectations
+          PR_VERSION="pr-${{ github.event.number }}-$(echo "${{ github.sha }}" | cut -c1-7)"
+          # Compute the build date here too: a `with:` block is not a shell,
+          # so $(date ...) in build-args would be passed through literally
+          BUILD_DATE="$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
+          echo "version=$PR_VERSION" >> "$GITHUB_OUTPUT"
+          echo "build_date=$BUILD_DATE" >> "$GITHUB_OUTPUT"
+          echo "Generated PR version: $PR_VERSION"
+      - name: Build
+        uses: docker/build-push-action@v6
         timeout-minutes: 10
         with:
-          target: production # Build production target for realistic validation
-          push: false # Don't push to registry during validation
-          load: false # Don't load image unless testing required
-          cache-from: type=gha # Use GitHub Actions cache for faster builds
-          cache-to: type=gha,mode=max # Update cache for future builds
-          tags: tux:pr-${{ github.event.number }}
+          target: production
+          push: false
+          # Load the single-platform image into the local daemon so the
+          # Trivy image scan below can actually find tux:pr-<number>
+          load: true
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
           build-args: |
-            VERSION=${{ steps.version.outputs.version }}
-            GIT_SHA=${{ steps.version.outputs.git_sha }}
-            BUILD_DATE=${{ steps.version.outputs.build_date }}
-          # CONTAINER METADATA ANNOTATIONS
-          # OCI-compliant image annotations for proper registry metadata
-          annotations: |
-            org.opencontainers.image.title="Tux"
-            org.opencontainers.image.description="Tux - The all in one discord bot for the All Things Linux Community"
-            org.opencontainers.image.source="https://github.com/allthingslinux/tux"
-            org.opencontainers.image.licenses="GPL-3.0"
-            org.opencontainers.image.authors="All Things Linux"
-            org.opencontainers.image.vendor="All Things Linux"
-            org.opencontainers.image.revision=${{ github.sha }}
-            org.opencontainers.image.documentation="https://github.com/allthingslinux/tux/blob/main/README.md"
-
-      # VALIDATION COMPLETION STATUS
-      # Provides clear feedback on validation success
-      - name: Validation complete
+            VERSION=${{ steps.pr_version.outputs.version }}
+            GIT_SHA=${{ github.sha }}
+            BUILD_DATE=${{ steps.pr_version.outputs.build_date }}
+      - name: Complete
         run: |
           echo "✅ Docker build validation completed successfully"
           echo "🔍 Build cache updated for faster future builds"
-
-  # ============================================================================
-  # PRODUCTION BUILD & DEPLOYMENT - Multi-Platform Container Images
-  # ============================================================================
-  # Purpose: Builds and deploys production-ready container images
-  # Strategy: Multi-platform builds with security scanning and testing
-  # Targets: GitHub Container Registry with proper versioning
-  # Security: Vulnerability scanning, provenance, and SBOM generation
-  # ============================================================================
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Scan Dockerfile
+        uses: reviewdog/action-trivy@v1
+        continue-on-error: true
+        with:
+          github_token: ${{ github.token }}
+          trivy_command: config
+          trivy_target: ./Dockerfile
+          trivy_version: v0.63.0
+          level: warning
+          reporter: github-pr-review
+          tool_name: trivy-dockerfile
+          filter_mode: added
+          trivy_flags: --severity HIGH,CRITICAL
+      - name: Scan Image
+        if: always()
+        uses: reviewdog/action-trivy@v1
+        continue-on-error: true
+        with:
+          github_token: ${{ github.token }}
+          trivy_command: image
+          trivy_target: tux:pr-${{ github.event.number }}
+          trivy_version: v0.63.0
+          level: warning
+          reporter: github-pr-review
+          tool_name: trivy-image
+          filter_mode: added
+          trivy_flags: --severity HIGH,CRITICAL --exit-code 0
   build:
     name: Build & Push
-    # EXECUTION CONDITIONS
-    # Skips pull requests to prevent unnecessary deployments
-    # Waits for validation to complete before proceeding
-    if: github.event_name != 'pull_request'
-    needs: # Always wait for validation
-      - validate
     runs-on: ubuntu-latest
+    needs: [validate]
+    # validate is PR-only and therefore skipped on tag pushes; without
+    # !cancelled(), a skipped needs-job would skip this build as well
+    if: ${{ !cancelled() && github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}
     permissions:
-      contents: read # Repository access for build context
-      packages: write # Container registry push permissions
-      security-events: write # Security scanning result upload
-      actions: read # Actions cache access
-      id-token: write # OIDC token for SLSA provenance
-
-    # OUTPUT CONFIGURATION
-    # Provides build outputs for downstream jobs (security scanning, cleanup)
-    outputs:
-      image: ${{ steps.meta.outputs.tags }}
-      digest: ${{ steps.build.outputs.digest }}
+      contents: read
+      packages: write
     steps:
-      # REPOSITORY CHECKOUT
-      # Full history needed for accurate version determination
       - name: Checkout
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
-        with:
-          fetch-depth: 0
-
-      # INTELLIGENT VERSION DETERMINATION
-      # Robust version resolution with multiple fallback strategies
-      - name: Prepare version info
-        id: version
-        run: |
-          # Try to get version from git tags, fallback to SHA (consistent with Dockerfile)
-          # Execute git commands only once and store results to avoid transient failures
-          if EXACT_TAG=$(git describe --tags --exact-match 2>/dev/null); then
-            VERSION=${EXACT_TAG#v}
-          elif TAG_DESC=$(git describe --tags --always 2>/dev/null); then
-            VERSION=${TAG_DESC#v}
-          else
-            VERSION="$(date +'%Y%m%d')-$(echo "${{ github.sha }}" | cut -c1-7)"
-          fi
-          {
-            echo "version=$VERSION"
-            echo "git_sha=${{ github.sha }}"
-            echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
-          } >> "$GITHUB_OUTPUT"
-          echo "Using version: $VERSION"
-
-      # MULTI-PLATFORM EMULATION SETUP
-      # QEMU enables building ARM64 images on AMD64 runners
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3
-        with:
-          platforms: linux/amd64,linux/arm64
-
-      # ADVANCED DOCKER BUILDX CONFIGURATION
-      # Enhanced builder with latest BuildKit features
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3
-        with:
-          driver-opts: |
-            image=moby/buildkit:buildx-stable-1
-
-      # SECURE REGISTRY AUTHENTICATION
-      # GitHub token-based authentication for container registry
-      - name: Log in to Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3
+        uses: actions/checkout@v4
+      - name: Setup Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Login to Registry
+        uses: docker/login-action@v3
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
-
-      # METADATA EXTRACTION AND TAG GENERATION
-      # Generates appropriate tags and labels based on git context
       - name: Extract metadata
         id: meta
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5
+        uses: docker/metadata-action@v5
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
-          flavor: |
-            latest=${{ github.ref == 'refs/heads/main' }}
           tags: |
-            type=ref,event=branch # Branch-based tags for development
-            type=ref,event=tag # Version tags for releases
-            type=sha # SHA-based tags for traceability
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}
+            type=raw,value=latest,enable={{is_default_branch}}
           labels: |
-            org.opencontainers.image.title="Tux"
-            org.opencontainers.image.description="Tux - The all in one discord bot for the All Things Linux Community"
-            org.opencontainers.image.source="https://github.com/${{ github.repository }}"
-            org.opencontainers.image.revision=${{ github.sha }}
-            org.opencontainers.image.licenses="GPL-3.0"
-            org.opencontainers.image.authors="All Things Linux"
-            org.opencontainers.image.vendor="All Things Linux"
-            org.opencontainers.image.documentation="https://github.com/allthingslinux/tux/blob/main/README.md"
-
-      # PRODUCTION BUILD AND DEPLOYMENT
-      # Multi-platform build with advanced security and performance features
-      - name: Build and push
-        id: build
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
-        timeout-minutes: 20
+            org.opencontainers.image.title=Tux
+            org.opencontainers.image.description=Tux - The all in one discord bot for the All Things Linux Community
+            org.opencontainers.image.source=https://github.com/allthingslinux/tux
+            org.opencontainers.image.licenses=GPL-3.0
+            org.opencontainers.image.authors=All Things Linux
+            org.opencontainers.image.vendor=All Things Linux
+            org.opencontainers.image.documentation=https://github.com/allthingslinux/tux/blob/main/README.md
+      - name: Generate Release Version
+        id: release_version
+        run: |
+          # Generate git describe format for release builds to match VERSIONING.md expectations
+          # This ensures the VERSION file contains the exact format expected by __init__.py
+          TAG_VERSION="${GITHUB_REF#refs/tags/}"
+          CLEAN_VERSION="${TAG_VERSION#v}" # Remove 'v' prefix if present
+          RELEASE_VERSION="$CLEAN_VERSION"
+          # Compute the build date here as well; `with:` blocks are not a
+          # shell, so $(date ...) in build-args would not be expanded
+          BUILD_DATE="$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
+          echo "version=$RELEASE_VERSION" >> "$GITHUB_OUTPUT"
+          echo "build_date=$BUILD_DATE" >> "$GITHUB_OUTPUT"
+          echo "Generated release version: $RELEASE_VERSION"
+      - name: Build & Push
+        uses: docker/build-push-action@v6
+        timeout-minutes: 15
         with:
-          context: .
           target: production
           push: true
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          cache-from: type=gha # Use GitHub Actions cache
-          cache-to: type=gha,mode=max # Update cache comprehensively
-          # CONDITIONAL MULTI-PLATFORM BUILDS
-          # ARM64 builds only for tagged releases to save resources
-          platforms: ${{ (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && contains(github.ref, 'v')) && 'linux/amd64,linux/arm64' || 'linux/amd64' }}
-          # SECURITY ATTESTATIONS
-          # SLSA provenance and SBOM only for releases
-          provenance: ${{ startsWith(github.ref, 'refs/tags/') }}
-          sbom: ${{ startsWith(github.ref, 'refs/tags/') }}
-          annotations: ${{ steps.meta.outputs.annotations }}
           build-args: |
-            BUILDKIT_INLINE_CACHE=1
-            VERSION=${{ steps.version.outputs.version }}
-            GIT_SHA=${{ steps.version.outputs.git_sha }}
-            BUILD_DATE=${{ steps.version.outputs.build_date }}
-
-      # PRODUCTION IMAGE VERIFICATION
-      # Smoke test to verify image functionality and dependency availability
-      - name: Test pushed image
-        run: |
-          docker run --rm --name tux-prod-test \
-            --entrypoint python \
-            "$(echo '${{ steps.meta.outputs.tags }}' | head -1)" \
-            -c "import tux; import sqlite3; import asyncio; print('🔍 Testing production image...'); print('✅ Bot imports successfully'); print('✅ Dependencies available'); conn = sqlite3.connect(':memory:'); conn.close(); print('✅ Database connectivity working'); print('🎉 Production image verified!')"
-
-  # ============================================================================
-  # SECURITY SCANNING - Vulnerability Detection and Reporting
-  # ============================================================================
-  # Purpose: Comprehensive security scanning of built container images
-  # Tools: Trivy vulnerability scanner with SARIF output
-  # Integration: GitHub Security tab for centralized vulnerability management
-  # Scope: Critical and high severity vulnerabilities
-  # ============================================================================
-  security:
-    name: Security Scan
-    # EXECUTION CONDITIONS
-    # Runs after successful build, skips pull requests
-    if: github.event_name != 'pull_request'
-    needs: build
-    runs-on: ubuntu-latest
-    permissions:
-      security-events: write # Required for SARIF upload
-    steps:
-      # REPOSITORY CHECKOUT
-      # Required for Dockerfile analysis and security context
-      - name: Checkout repository
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
-        with:
-          fetch-depth: 0
-
-      # IMAGE REFERENCE EXTRACTION
-      # Gets the first (primary) image tag for security scanning
-      - name: Get first image tag
-        id: first_tag
-        run: echo "image=$(echo '${{ needs.build.outputs.image }}' | head -1)" >>
-          "$GITHUB_OUTPUT"
-
-      # TRIVY CACHE OPTIMIZATION
-      # Caches vulnerability database for faster subsequent scans
-      - name: Cache Trivy
-        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4
+            VERSION=${{ steps.release_version.outputs.version }}
+            GIT_SHA=${{ github.sha }}
+            BUILD_DATE=${{ steps.release_version.outputs.build_date }}
+      - name: Scan Final Image
+        if: always()
+        uses: reviewdog/action-trivy@v1
+        continue-on-error: true
         with:
-          path: ~/.cache/trivy
-          key: cache-trivy-${{ hashFiles('Dockerfile') }}-${{ github.run_id }}
-          restore-keys: |
-            cache-trivy-${{ hashFiles('Dockerfile') }}-
-            cache-trivy-
-
-      # VULNERABILITY SCANNING EXECUTION
-      # Comprehensive container image security analysis
-      - name: Run Trivy vulnerability scanner
-        uses: 
aquasecurity/trivy-action@master - with: - image-ref: ${{ steps.first_tag.outputs.image }} - format: sarif # GitHub Security compatible format - output: trivy-results.sarif - severity: CRITICAL,HIGH # Focus on actionable vulnerabilities - scanners: vuln # Vulnerability scanning only - - # SECURITY RESULTS INTEGRATION - # Uploads scan results to GitHub Security tab for centralized management - - name: Upload Trivy scan results - uses: github/codeql-action/upload-sarif@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3 - with: - sarif_file: trivy-results.sarif - - # ============================================================================ - # CONTAINER REGISTRY CLEANUP - Automated Storage Management - # ============================================================================ - # Purpose: Automated cleanup of old container images and build artifacts - # Schedule: Monthly cleanup to prevent registry storage bloat - # Strategy: Retains recent versions while removing older, unused images - # Safety: Conservative retention policy to prevent accidental data loss - # ============================================================================ + github_token: ${{ github.token }} + trivy_command: image + trivy_target: ${{ fromJSON(steps.meta.outputs.json).tags[0] }} + trivy_version: v0.63.0 + level: warning + reporter: github-pr-review + tool_name: trivy-final + filter_mode: nofilter + trivy_flags: --severity HIGH,CRITICAL --exit-code 0 cleanup: - name: Registry Cleanup - # EXECUTION CONDITIONS - # Runs on scheduled maintenance or manual trigger only - if: github.event_name != 'pull_request' && (github.event_name == 'schedule' || - github.event_name == 'workflow_dispatch') + name: Cleanup runs-on: ubuntu-latest + if: github.event_name == 'schedule' permissions: - packages: write # Required for container registry management + packages: write + contents: read steps: - # AUTOMATED VERSION CLEANUP - # Removes old container versions while preserving recent releases - - name: Delete old container versions - uses: actions/delete-package-versions@e5bc658cc4c965c472efe991f8beea3981499c55 # v5 + - name: Checkout + uses: actions/checkout@v4 + - name: Clean Old Images + uses: actions/delete-package-versions@v5 with: - package-name: tux # Target package name - package-type: container # Container images only - min-versions-to-keep: 10 # Safety buffer for rollbacks - delete-only-untagged-versions: false # Clean tagged versions too - - # LEGACY BUILDCACHE CLEANUP - # Cleans up any remaining build cache artifacts from previous configurations - - name: Delete buildcache images - continue-on-error: true # Non-critical cleanup operation - run: | - echo "Cleaning up any remaining buildcache images..." - # This will help clean up existing buildcache images - # After our fix, no new buildcache images should be created -# ============================================================================== -# DOCKER WORKFLOW BEST PRACTICES IMPLEMENTED -# ============================================================================== -# -# 1. SECURITY & COMPLIANCE: -# - Comprehensive vulnerability scanning with Trivy -# - SLSA provenance and SBOM generation for releases -# - Secure registry authentication with minimal permissions -# - Container content verification through smoke tests -# - SARIF integration for centralized security management -# -# 2. 
PERFORMANCE OPTIMIZATION: -# - Multi-level caching (GitHub Actions, BuildKit inline cache) -# - Conditional multi-platform builds to save resources -# - Build timeout controls to prevent resource waste -# - Efficient layer caching with cache-from/cache-to -# - Platform-specific optimizations (ARM64 only for releases) -# -# 3. RELIABILITY & MAINTAINABILITY: -# - Robust version determination with multiple fallback strategies -# - Comprehensive error handling and status reporting -# - Automated registry cleanup to prevent storage issues -# - Build validation on pull requests without deployment -# - Production image verification with functional testing -# -# 4. DEPLOYMENT STRATEGY: -# - Pull Request: Build validation only (no registry push) -# - Main Branch: Single-platform development builds -# - Tagged Releases: Multi-platform production builds with security attestations -# - Scheduled: Automated cleanup and maintenance operations -# -# CONTAINER REGISTRY STRUCTURE: -# ------------------------------ -# ghcr.io/allthingslinux/tux: -# ├── latest # Latest main branch build -# ├── main # Main branch builds -# ├── v1.0.0, v1.1.0, etc. # Release versions -# ├── sha-abcd1234 # Commit-based tags -# └── pr-123 # Pull request builds (validation only) -# -# SUPPORTED PLATFORMS: -# -------------------- -# - linux/amd64: All builds (development, testing, production) -# - linux/arm64: Tagged releases only (v*.* patterns) -# -# SECURITY SCANNING: -# ------------------ -# - Trivy vulnerability scanner (Critical + High severity) -# - SARIF output integration with GitHub Security tab -# - Automated security advisory notifications -# - Container provenance and SBOM for supply chain security -# -# CACHE STRATEGY: -# --------------- -# - GitHub Actions cache: Build layer caching across workflow runs -# - BuildKit inline cache: Container layer caching within builds -# - Trivy cache: Vulnerability database caching for faster scans -# - Multi-level fallback: Hierarchical cache keys for optimal hit rates -# -# ============================================================================== + package-name: tux + package-type: container + min-versions-to-keep: 15 + delete-only-untagged-versions: true + - name: Cleanup Summary + run: |- + { + echo "## 🐳 Docker Registry Cleanup" + echo "- **Policy**: Keep 15 versions, remove untagged" + echo "- **Schedule**: Weekly cleanup" + echo "- **Status**: ✅ Cleanup completed" + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/maintenance.yml b/.github/workflows/maintenance.yml index 501c80cdf..81aaedeb9 100644 --- a/.github/workflows/maintenance.yml +++ b/.github/workflows/maintenance.yml @@ -1,268 +1,235 @@ -# ============================================================================== -# TUX DISCORD BOT - AUTOMATED MAINTENANCE & HOUSEKEEPING WORKFLOW -# ============================================================================== -# -# This workflow handles automated maintenance tasks for the Tux Discord bot -# project, ensuring repository health, code quality tracking, and resource -# management. It provides intelligent automation for routine maintenance -# tasks while offering manual controls for administrative operations. -# -# MAINTENANCE CAPABILITIES: -# ------------------------- -# 1. Automated TODO/FIXME conversion to GitHub issues for task tracking -# 2. Docker image registry cleanup to prevent storage bloat -# 3. Repository health monitoring and reporting -# 4. Dependency freshness tracking and alerts -# 5. 
Repository statistics and metrics collection -# -# AUTOMATION STRATEGY: -# -------------------- -# - TODO Management: Real-time conversion on code changes -# - Image Cleanup: Monthly scheduled cleanup with configurable retention -# - Health Checks: Monthly comprehensive repository analysis -# - Manual Override: Administrative controls for immediate execution -# -# RESOURCE MANAGEMENT: -# -------------------- -# - Intelligent scheduling spread across different days -# - Configurable retention policies for different resource types -# - Non-blocking execution with graceful failure handling -# - Comprehensive logging for audit trails and debugging -# -# ============================================================================== +--- name: Maintenance -# TRIGGER CONFIGURATION -# Comprehensive maintenance scheduling with manual override capabilities -# Balances automated maintenance with administrative control on: - # REAL-TIME TODO TRACKING - # Converts TODOs to issues immediately when code changes are pushed push: - branches: - - main - - # MANUAL ADMINISTRATIVE CONTROLS - # Provides immediate access to maintenance operations for administrators + branches: [main] workflow_dispatch: inputs: - # DOCKER IMAGE CLEANUP CONTROLS - # Manual override for immediate image cleanup operations cleanup_images: description: Clean up old Docker images type: boolean default: false - - # RETENTION POLICY CONFIGURATION - # Configurable image retention for different cleanup scenarios keep_amount: description: Number of images to keep required: false default: '10' - - # UNTAGGED IMAGE MANAGEMENT - # Control over untagged image cleanup (typically development artifacts) remove_untagged: description: Remove untagged images type: boolean default: false - - # TODO TRACKING MANUAL CONTROLS - # Administrative overrides for TODO to issue conversion manual_commit_ref: description: SHA to compare for TODOs required: false manual_base_ref: description: Optional earlier SHA for TODOs required: false - - # SCHEDULED AUTOMATED MAINTENANCE - # Monthly comprehensive maintenance spread to avoid resource conflicts schedule: - - cron: 0 3 1 * * # Monthly cleanup on the 1st at 3 AM -# CONCURRENCY MANAGEMENT -# Prevents conflicting maintenance operations while allowing manual execution + - cron: 0 3 1 * * # Monthly cleanup on 1st at 3 AM + - cron: 0 2 * * 0 # Weekly cleanup on Sundays at 2 AM concurrency: group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: false # Maintenance operations should complete + cancel-in-progress: false +env: + ADMIN_PAT: ${{ secrets.ADMIN_PAT }} jobs: - # ============================================================================ - # TODO TO ISSUES CONVERSION - Automated Task Tracking - # ============================================================================ - # Purpose: Converts code TODOs and FIXMEs into trackable GitHub issues - # Strategy: Real-time conversion on code changes with intelligent categorization - # Benefits: Ensures no tasks are forgotten and provides proper project tracking - # Integration: Automatic assignment and labeling for efficient task management - # ============================================================================ - todo-to-issues: - name: Convert TODOs to Issues + todos: + name: TODOs runs-on: ubuntu-latest - # EXECUTION CONDITIONS - # Runs on code pushes or manual trigger with commit reference if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.manual_commit_ref) permissions: - contents: read # 
Required for repository access - issues: write # Required for issue creation and management + contents: read + issues: write steps: - # REPOSITORY CHECKOUT - # Full history required for accurate TODO comparison and tracking - - name: Checkout Repository - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + - name: Checkout + uses: actions/checkout@v4 with: fetch-depth: 0 - - # INTELLIGENT TODO CONVERSION - # Automated conversion with smart categorization and issue management - - name: Convert TODOs to Issues - uses: alstr/todo-to-issue-action@c45b007d85c8edf3365b139a9d4c65793e7c674f # v5.1.13 + - name: Convert + uses: alstr/todo-to-issue-action@v5.1.13 with: - CLOSE_ISSUES: true # Auto-close resolved TODOs - INSERT_ISSUE_URLS: true # Link issues back to code - AUTO_ASSIGN: true # Assign to commit authors - # CATEGORIZATION STRATEGY - # Different keywords map to different issue types and labels - IDENTIFIERS: '[{"name": "TODO", "labels": ["enhancement"]}, {"name": "FIXME", - "labels": ["bug"]}]' - ESCAPE: true # Handle special characters safely - # EXCLUSION PATTERNS - # Skip maintenance-heavy directories and lock files - IGNORE: .github/,node_modules/,dist/,build/,vendor/,poetry.lock - PROJECTS_SECRET: ${{ secrets.ADMIN_PAT }} + CLOSE_ISSUES: true + INSERT_ISSUE_URLS: true + AUTO_ASSIGN: true + IDENTIFIERS: | + [{"name": "TODO", "labels": ["enhancement"]}, {"name": "FIXME", "labels": ["bug"]}] + ESCAPE: true + IGNORE: | + .github/,node_modules/,dist/,build/,vendor/,uv.lock + PROJECTS_SECRET: ${{ env.ADMIN_PAT }} env: - # MANUAL OVERRIDE SUPPORT - # Allows administrative control over TODO scanning scope MANUAL_COMMIT_REF: ${{ github.event.inputs.manual_commit_ref }} MANUAL_BASE_REF: ${{ github.event.inputs.manual_base_ref }} - - # ============================================================================ - # DOCKER IMAGE CLEANUP - Container Registry Maintenance - # ============================================================================ - # Purpose: Automated cleanup of old Docker images to prevent storage bloat - # Strategy: Configurable retention policies with manual override capabilities - # Safety: Conservative defaults with explicit administrator controls - # Scope: Targets project-specific container images with version management - # ============================================================================ - cleanup-docker-images: - name: Cleanup Docker Images + cleanup: + name: Cleanup runs-on: ubuntu-latest - # EXECUTION CONDITIONS - # Runs on scheduled maintenance or manual trigger with image cleanup flag if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.cleanup_images == 'true') permissions: - packages: write # Required for container registry management - contents: read # Required for repository access + packages: write + contents: read steps: - # AUTOMATED IMAGE CLEANUP - # Configurable cleanup with safety mechanisms and retention policies - - name: Delete old container versions - uses: actions/delete-package-versions@e5bc658cc4c965c472efe991f8beea3981499c55 # v5 - with: - package-name: tux # Target specific package - package-type: container # Container images only - # CONFIGURABLE RETENTION POLICY - # Default 10 images, override via manual trigger - min-versions-to-keep: ${{ github.event.inputs.keep_amount || '10' }} - # UNTAGGED IMAGE HANDLING - # Configurable untagged image cleanup (typically safe to remove) - delete-only-untagged-versions: ${{ github.event.inputs.remove_untagged || 'false' }} + 
- name: Checkout + uses: actions/checkout@v4 + - name: Registry Size Check + id: registry_size + run: | + echo "Checking registry size..." + # Get package info to check size + PACKAGE_INFO=$(gh api user/packages/container/tux 2>/dev/null || echo '{"size_in_bytes": 0}') + SIZE_BYTES=$(echo "$PACKAGE_INFO" | jq -r '.size_in_bytes // 0') + SIZE_GB=$(echo "scale=2; $SIZE_BYTES / 1024 / 1024 / 1024" | bc -l 2>/dev/null || echo "0") + { + echo "size_gb=$SIZE_GB" + echo "size_warning=$([ "$(echo "$SIZE_GB > 5" | bc -l)" = "1" ] && echo "true" || echo "false")" + } >> "$GITHUB_OUTPUT" + echo "Registry size: ${SIZE_GB}GB" - # ============================================================================ - # REPOSITORY HEALTH CHECK - Comprehensive Project Analysis - # ============================================================================ - # Purpose: Monthly comprehensive analysis of repository health and metrics - # Scope: File size analysis, dependency freshness, and project statistics - # Output: Structured reporting for project maintenance and planning - # Integration: Potential future integration with issue creation for problems - # ============================================================================ - health-check: - name: Repository Health Check + # Alert if size is too large + if (( $(echo "$SIZE_GB > 5" | bc -l) )); then + echo "⚠️ Registry size exceeds 5GB: ${SIZE_GB}GB" + else + echo "✅ Registry size is acceptable: ${SIZE_GB}GB" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Clean Old Images + uses: actions/delete-package-versions@v5 + with: + package-name: tux + package-type: container + min-versions-to-keep: ${{ github.event.inputs.keep_amount || '15' }} + delete-only-untagged-versions: ${{ github.event.inputs.remove_untagged || 'true' }} + - name: Clean Build Cache Images + run: | + echo "Cleaning up build cache images..." 
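+          # Example version entry (trimmed, hypothetical values) as returned
+          # by the Packages API:
+          #   {"id": 123, "name": "sha256:abc...",
+          #    "metadata": {"container": {"tags": ["buildcache"]}}}
+          # The digest-style "name" never contains "buildcache", so the jq
+          # filter below matches on the tag list instead.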
+          # Delete build cache images older than 30 days (matched by tag)
+          gh api user/packages/container/tux/versions | \
+            jq -r '.[] | select((.metadata.container.tags // []) | any(contains("buildcache"))) | select(.created_at < "'"$(date -d '30 days ago' -Iseconds)"'") | .id' | \
+            xargs -r -I {} gh api -X DELETE user/packages/container/tux/versions/{} || echo "No build cache images to clean"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Registry Cleanup Summary
+        run: |
+          {
+            echo "## 🧹 Registry Cleanup Summary"
+            echo "- **Registry Size**: ${{ steps.registry_size.outputs.size_gb }}GB"
+            echo "- **Cleanup Policy**: Keep 15 versions, remove untagged"
+            echo "- **Build Cache**: Cleaned images older than 30 days"
+            if [ "${{ steps.registry_size.outputs.size_warning }}" = "true" ]; then
+              echo "- **⚠️ Warning**: Registry size exceeds 5GB"
+            else
+              echo "- **✅ Status**: Registry size is acceptable"
+            fi
+          } >> "$GITHUB_STEP_SUMMARY"
+  health:
+    name: Health Check
     runs-on: ubuntu-latest
-    # SCHEDULING
-    # Only runs on monthly scheduled maintenance for comprehensive analysis
     if: github.event_name == 'schedule'
     permissions:
-      contents: read # Required for repository analysis
-      issues: write # Required for future issue creation capabilities
+      contents: read
+      issues: write
+      packages: read
     steps:
-      # REPOSITORY CHECKOUT
-      # Required for comprehensive file and dependency analysis
-      - name: Checkout Repository
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
-
-      # STORAGE HEALTH ANALYSIS
-      # Identifies large files that may impact repository performance
-      - name: Check for large files
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Repository Health Summary
         run: |
-          echo "Checking for files larger than 50MB..."
-          find . -type f -size +50M -not -path "./.git/*" || echo "No large files found"
-
-      # DEPENDENCY FRESHNESS ANALYSIS
-      # Monitors for outdated dependencies requiring security or feature updates
-      - name: Check for outdated dependencies
+          {
+            echo "## 📊 Repository Health Check"
+            echo "**Date**: $(date)"
+            echo ""
+          } >> "$GITHUB_STEP_SUMMARY"
+      - name: Check Large Files
         run: |
-          if command -v poetry &> /dev/null; then
+          {
+            echo "### 📁 Large Files Check"
+            echo "Checking for files larger than 50MB..."
+          } >> "$GITHUB_STEP_SUMMARY"
+          LARGE_FILES=$(find . -type f -size +50M -not -path "./.git/*" 2>/dev/null || echo "")
+          if [ -n "$LARGE_FILES" ]; then
+            {
+              echo "⚠️ **Large files found:**"
+              echo '```'
+              echo "$LARGE_FILES"
+              echo '```'
+            } >> "$GITHUB_STEP_SUMMARY"
+          else
+            echo "✅ **No large files found**" >> "$GITHUB_STEP_SUMMARY"
+          fi
+          echo "" >> "$GITHUB_STEP_SUMMARY"
+      - name: Check Dependencies
+        run: |
+          {
+            echo "### 📦 Dependencies Check"
            echo "Checking for outdated dependencies..."
-            poetry show --outdated || echo "All dependencies up to date"
+          } >> "$GITHUB_STEP_SUMMARY"
+          if command -v uv >/dev/null 2>&1; then
+            # "uv outdated" is not a uv subcommand; use the pip-compatible
+            # listing instead (assumes a recent uv release; the || fallback
+            # still covers older versions)
+            OUTDATED=$(uv pip list --outdated 2>/dev/null || echo "No outdated dependencies found")
+            {
+              echo '```'
+              echo "$OUTDATED"
+              echo '```'
+            } >> "$GITHUB_STEP_SUMMARY"
+          else
+            echo "⚠️ **uv not available for dependency check**" >> "$GITHUB_STEP_SUMMARY"
           fi
+          echo "" >> "$GITHUB_STEP_SUMMARY"
+      - name: Check Repository Size
+        run: |
+          {
+            echo "### 💾 Repository Size Analysis"
+            REPO_SIZE=$(du -sh . 
2>/dev/null | cut -f1 || echo "Unknown") + echo "**Repository Size**: $REPO_SIZE" - # PROJECT METRICS COLLECTION - # Comprehensive repository statistics for project health monitoring - - name: Repository statistics + # Check .git size + GIT_SIZE=$(du -sh .git 2>/dev/null | cut -f1 || echo "Unknown") + echo "**Git History Size**: $GIT_SIZE" + echo "" + } >> "$GITHUB_STEP_SUMMARY" + - name: Check Stale Branches + run: | + { + echo "### 🌿 Branch Analysis" + echo "**Recent branches:**" + echo '```' + git branch -r --sort=-committerdate | head -10 2>/dev/null || echo "Could not check branches" + echo '```' + echo "" + } >> "$GITHUB_STEP_SUMMARY" + - name: Check Registry Health + run: | + { + echo "### 🐳 Container Registry Health" + if command -v gh >/dev/null 2>&1; then + # Get package info + PACKAGE_INFO=$(gh api user/packages/container/tux 2>/dev/null || echo '{"size_in_bytes": 0, "version_count": 0}') + SIZE_BYTES=$(echo "$PACKAGE_INFO" | jq -r '.size_in_bytes // 0') + VERSION_COUNT=$(echo "$PACKAGE_INFO" | jq -r '.version_count // 0') + SIZE_GB=$(echo "scale=2; $SIZE_BYTES / 1024 / 1024 / 1024" | bc -l 2>/dev/null || echo "0") + echo "**Registry Size**: ${SIZE_GB}GB" + echo "**Version Count**: $VERSION_COUNT" + if (( $(echo "$SIZE_GB > 5" | bc -l) )); then + echo "⚠️ **Warning**: Registry size exceeds 5GB" + else + echo "✅ **Status**: Registry size is acceptable" + fi + else + echo "⚠️ **GitHub CLI not available for registry check**" + fi + echo "" + } >> "$GITHUB_STEP_SUMMARY" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Check Recent Activity run: |- - echo "Repository Statistics:" - echo "=====================" - echo "Total files: $(find . -type f -not -path "./.git/*" | wc -l)" - echo "Python files: $(find . -name "*.py" -not -path "./.git/*" | wc -l)" - echo "Lines of Python code: $(find . -name "*.py" -not -path "./.git/*" -exec wc -l {} + 2>/dev/null | tail -1 || echo "0")" - echo "Docker files: $(find . -name "Dockerfile*" -o -name "docker-compose*.yml" | wc -l)" -# ============================================================================== -# MAINTENANCE WORKFLOW BEST PRACTICES IMPLEMENTED -# ============================================================================== -# -# 1. AUTOMATED TASK MANAGEMENT: -# - Real-time TODO to issue conversion for comprehensive task tracking -# - Intelligent categorization (TODO → enhancement, FIXME → bug) -# - Automatic assignment to commit authors for accountability -# - Smart exclusion patterns to avoid maintenance noise -# -# 2. RESOURCE MANAGEMENT: -# - Configurable Docker image retention policies -# - Scheduled cleanup to prevent storage bloat -# - Manual override capabilities for immediate administrative action -# - Conservative defaults with explicit administrative controls -# -# 3. REPOSITORY HEALTH MONITORING: -# - Comprehensive file size analysis for performance optimization -# - Dependency freshness tracking for security and feature updates -# - Project metrics collection for development planning -# - Structured reporting for maintenance decision making -# -# 4. 
OPERATIONAL EXCELLENCE: -# - Non-blocking execution with graceful failure handling -# - Comprehensive logging for audit trails and debugging -# - Intelligent scheduling to avoid resource conflicts -# - Manual override capabilities for emergency situations -# -# MAINTENANCE SCHEDULE: -# --------------------- -# - TODO Conversion: Real-time on every main branch push -# - Image Cleanup: Monthly on the 1st at 3 AM UTC -# - Health Checks: Monthly comprehensive analysis -# - Manual Triggers: Available for immediate administrative needs -# -# RETENTION POLICIES: -# ------------------- -# - Docker Images: 10 versions by default (configurable) -# - Untagged Images: Preserved by default (configurable) -# - Issues: Automatically closed when TODOs are resolved -# - Logs: Retained according to GitHub Actions standard retention -# -# ADMINISTRATIVE CONTROLS: -# ------------------------ -# - Manual image cleanup with custom retention settings -# - Custom TODO scanning with specific commit ranges -# - Immediate execution override for emergency maintenance -# - Configurable cleanup policies for different scenarios -# -# ============================================================================== + { + echo "### 📈 Recent Activity" + echo "**Recent commits:**" + echo '```' + git log --oneline --since="1 week ago" | head -10 2>/dev/null || echo "Could not check recent commits" + echo '```' + echo "" + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6c737febc..ef005d09d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,75 +1,31 @@ -# ============================================================================== -# TUX DISCORD BOT - AUTOMATED RELEASE MANAGEMENT WORKFLOW -# ============================================================================== -# -# This workflow automates the release process for the Tux Discord bot, -# providing intelligent version management, comprehensive changelog generation, -# and automated release deployment. It ensures releases are properly tested, -# documented, and deployed with appropriate versioning and metadata. -# -# RELEASE CAPABILITIES: -# --------------------- -# 1. Automated release creation from git tags or manual triggers -# 2. Intelligent prerelease detection and handling -# 3. Comprehensive changelog generation from commit history -# 4. Integration with test suite validation before release -# 5. 
Automated GitHub release creation with proper metadata -# -# VERSIONING STRATEGY: -# -------------------- -# - Semantic Versioning (SemVer): v1.2.3 format for releases -# - Prerelease Support: Alpha, beta, rc versions with special handling -# - Manual Override: Administrative control for custom release scenarios -# - Git Tag Integration: Automatic detection and processing of version tags -# -# QUALITY ASSURANCE: -# ------------------ -# - Test Suite Integration: Waits for test completion before release -# - Version Validation: Ensures proper version format and consistency -# - Changelog Generation: Automated documentation of changes -# - Release Notes: Enhanced GitHub release notes with commit details -# -# ============================================================================== +--- name: Release -# TRIGGER CONFIGURATION -# Supports both automated and manual release creation workflows -# Provides flexibility for different release scenarios and administrative needs on: - # AUTOMATED GIT TAG RELEASES - # Triggered by semantic version tags pushed to the repository push: - tags: - - v* # Matches v1.0.0, v2.1.3-beta, v1.0.0-rc1, etc. - - # MANUAL RELEASE TRIGGER - # Administrative control for custom release scenarios and testing + tags: [v*] workflow_dispatch: inputs: - # VERSION SPECIFICATION - # Manual version input with validation and format requirements version: description: Version to release (e.g., v1.2.3) required: true type: string -# RELEASE PERMISSIONS -# Comprehensive permissions for release creation and artifact management permissions: - contents: write # Required for release creation and tag management - packages: write # Required for container image publishing - pull-requests: read # Required for changelog generation and integration + contents: write + packages: write + pull-requests: read jobs: - validate-release: - name: Validate Release + validate: + name: Validate runs-on: ubuntu-latest outputs: version: ${{ steps.version.outputs.version }} is_prerelease: ${{ steps.version.outputs.is_prerelease }} steps: - name: Checkout - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Determine version + - name: Determine Version id: version run: | if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then @@ -87,32 +43,28 @@ jobs: fi echo "Release version: $VERSION" echo "Is prerelease: $([ "$VERSION" != "${VERSION/alpha/}" ] || [ "$VERSION" != "${VERSION/beta/}" ] || [ "$VERSION" != "${VERSION/rc/}" ] && echo "true" || echo "false")" - - # Wait for tests to pass before creating release - wait-for-tests: + wait: name: Wait for Tests runs-on: ubuntu-latest steps: - - name: Wait for test workflow - uses: lewagon/wait-on-check-action@0dceb95e7c4cad8cc7422aee3885998f5cab9c79 # v1.4.0 + - name: Wait + uses: lewagon/wait-on-check-action@v1.4.0 with: ref: ${{ github.sha }} - check-name: Tests (Python 3.13) # Wait for the main test job + check-name: Tests (Unit Tests) repo-token: ${{ secrets.GITHUB_TOKEN }} wait-interval: 30 allowed-conclusions: success - create-release: + create: name: Create Release runs-on: ubuntu-latest - needs: - - validate-release - - wait-for-tests + needs: [validate, wait] steps: - name: Checkout - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Generate changelog + - name: Generate Changelog id: changelog run: | # Get the previous tag @@ -132,24 +84,12 @@ jobs: echo "EOF" } >> "$GITHUB_OUTPUT" fi - - name: 
Create GitHub Release - uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2 + - name: Create Release + uses: softprops/action-gh-release@v2 with: - tag_name: ${{ needs.validate-release.outputs.version }} - name: Release ${{ needs.validate-release.outputs.version }} + tag_name: ${{ needs.validate.outputs.version }} + name: Release ${{ needs.validate.outputs.version }} body: ${{ steps.changelog.outputs.changelog }} - prerelease: ${{ needs.validate-release.outputs.is_prerelease == 'true' }} + prerelease: ${{ needs.validate.outputs.is_prerelease == 'true' }} generate_release_notes: true - make_latest: ${{ needs.validate-release.outputs.is_prerelease == 'false' }} - notify-release: - name: Notify Release - runs-on: ubuntu-latest - needs: - - validate-release - - create-release - if: always() && needs.create-release.result == 'success' - steps: - - name: Release notification - run: |- - echo "🎉 Release ${{ needs.validate-release.outputs.version }} created successfully!" - echo "📋 Check the release page for details" + make_latest: ${{ needs.validate.outputs.is_prerelease == 'false' }} diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml index c2919a573..ee88ea6d1 100644 --- a/.github/workflows/security.yml +++ b/.github/workflows/security.yml @@ -1,286 +1,130 @@ -# ============================================================================== -# TUX DISCORD BOT - COMPREHENSIVE SECURITY SCANNING WORKFLOW -# ============================================================================== -# -# This workflow provides comprehensive security scanning and vulnerability -# management for the Tux Discord bot project. It implements multiple layers -# of security analysis including static code analysis, dependency scanning, -# and automated security advisory management with intelligent automation -# for low-risk updates. -# -# SECURITY CAPABILITIES: -# ---------------------- -# 1. Multi-language static analysis with GitHub CodeQL -# 2. Dependency vulnerability scanning and review -# 3. Automated security advisory monitoring -# 4. Intelligent Dependabot auto-merge for patch/minor updates -# 5. 
Comprehensive vulnerability reporting and tracking -# -# SCANNING STRATEGY: -# ------------------ -# - CodeQL: Weekly comprehensive analysis for vulnerabilities -# - Dependency Review: Real-time analysis on pull requests -# - Safety Check: Continuous monitoring of Python dependencies -# - Dependabot: Automated updates with intelligent approval -# -# AUTOMATION FEATURES: -# -------------------- -# - Auto-approval of patch and minor dependency updates -# - Centralized security event reporting via SARIF -# - Intelligent scheduling to avoid resource conflicts -# - Conservative security policies with manual override options -# -# ============================================================================== +--- name: Security -# TRIGGER CONFIGURATION -# Comprehensive security scanning across different development stages -# Balances thorough coverage with resource efficiency on: - # MAIN BRANCH MONITORING - # Continuous security monitoring for production code push: - branches: - - main - - # PULL REQUEST SECURITY VALIDATION - # Real-time security checks for incoming changes + branches: [main] pull_request: - branches: - - main - - # SCHEDULED COMPREHENSIVE SCANNING - # Weekly deep analysis spread across different days from other workflows + branches: [main] schedule: - - cron: 20 7 * * 1 # Weekly on Mondays (spread from other schedules) -# CONCURRENCY MANAGEMENT -# Prevents resource conflicts while allowing parallel security analysis + - cron: 20 7 * * 1 concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: ${{ github.event_name == 'pull_request' }} +env: + PYTHON_VERSION: '3.13' + SAFETY_SEVERITY: HIGH,CRITICAL jobs: - # ============================================================================ - # CODEQL STATIC ANALYSIS - Multi-Language Security Scanning - # ============================================================================ - # Purpose: Comprehensive static code analysis for security vulnerabilities - # Coverage: Python source code and GitHub Actions workflows - # Integration: GitHub Security tab with detailed vulnerability reports - # Frequency: Main branch pushes and weekly scheduled deep scans - # ============================================================================ + changes: + name: File Detection + runs-on: ubuntu-latest + outputs: + python: ${{ steps.python_changes.outputs.any_changed }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Check Python + uses: tj-actions/changed-files@v46 + id: python_changes + with: + files: | + **/*.py + pyproject.toml + uv.lock + files_ignore: | + tests/**/*.py + **/tests/**/*.py + **/migrations/**/*.py + src/tux/database/migrations/**/*.py codeql: - name: CodeQL Analysis + name: CodeQL runs-on: ubuntu-latest - # RESOURCE OPTIMIZATION - # Skips CodeQL on pull requests to save Actions minutes for critical tasks - # Focuses on main branch and scheduled runs for comprehensive coverage - if: github.event_name != 'pull_request' + needs: [changes] + if: (needs.changes.outputs.python == 'true' || github.event_name == 'workflow_dispatch') + && github.event_name != 'pull_request' permissions: - security-events: write # Required for SARIF upload - packages: read # Required for dependency analysis - actions: read # Required for workflow analysis - contents: read # Required for repository access - - # MULTI-LANGUAGE ANALYSIS STRATEGY - # Analyzes different languages with optimized configurations + security-events: write + packages: read + actions: read + contents: read strategy: 
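+      # The matrix expands to two independent analyses - CodeQL (actions)
+      # and CodeQL (python); fail-fast stays off so one failing language
+      # still reports results for the other.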
fail-fast: false matrix: include: - # GITHUB ACTIONS WORKFLOW ANALYSIS - # Scans workflow files for security misconfigurations - language: actions build-mode: none - - # PYTHON SOURCE CODE ANALYSIS - # Comprehensive Python security vulnerability detection - language: python build-mode: none steps: - # REPOSITORY CHECKOUT - # Full repository access required for comprehensive analysis - - name: Checkout repository - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - - # CODEQL INITIALIZATION - # Configures language-specific analysis parameters - - name: Initialize CodeQL - uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3 + - name: Checkout + uses: actions/checkout@v4 + - name: Initialize + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} build-mode: ${{ matrix.build-mode }} - - # SECURITY ANALYSIS EXECUTION - # Performs comprehensive static analysis with categorized results - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3 + - name: Analyze + uses: github/codeql-action/analyze@v3 with: - category: /language:${{matrix.language}} - - # ============================================================================ - # DEPENDENCY REVIEW - Real-time Vulnerability Assessment - # ============================================================================ - # Purpose: Real-time analysis of dependency changes in pull requests - # Scope: High-severity vulnerability detection and licensing compliance - # Integration: Automated PR comments with security recommendations - # Workflow: Blocks merging of PRs with high-severity vulnerabilities - # ============================================================================ - dependency-review: - name: Dependency Review + category: /language:${{ matrix.language }} + dependencies: + name: Dependencies runs-on: ubuntu-latest - # PULL REQUEST FOCUS - # Only analyzes dependency changes in pull requests for targeted feedback if: github.event_name == 'pull_request' permissions: - contents: read # Required for repository access - pull-requests: write # Required for PR comment posting + contents: read + pull-requests: write steps: - # REPOSITORY CHECKOUT - # Required for dependency comparison between base and head branches - - name: Checkout Repository - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - - # DEPENDENCY VULNERABILITY ANALYSIS - # Analyzes dependency changes for security vulnerabilities - - name: Dependency Review - uses: actions/dependency-review-action@bc41886e18ea39df68b1b1245f4184881938e050 # v4 + - name: Checkout + uses: actions/checkout@v4 + - name: Review + uses: actions/dependency-review-action@v4 with: - fail-on-severity: high # Block high-severity vulnerabilities - comment-summary-in-pr: always # Always provide PR feedback - - # ============================================================================ - # SECURITY ADVISORIES - Python Dependency Vulnerability Monitoring - # ============================================================================ - # Purpose: Continuous monitoring of Python dependencies for security advisories - # Tools: Safety CLI for comprehensive vulnerability database checking - # Output: Structured JSON reports for tracking and remediation - # Integration: Artifact storage for security audit trails - # ============================================================================ - security-advisories: + fail-on-severity: high + comment-summary-in-pr: always + 
  python:
    name: Python Security
    runs-on: ubuntu-latest
-    # MAIN BRANCH FOCUS
-    # Monitors production dependencies, skips pull request analysis
-    if: github.event_name != 'pull_request'
+    needs: [changes]
+    if: (needs.changes.outputs.python == 'true' || github.event_name == 'workflow_dispatch')
+      && github.event_name != 'pull_request'
    permissions:
-      contents: read # Required for repository access
-      security-events: write # Required for security event reporting
+      contents: read
+      security-events: write
    steps:
-      # REPOSITORY CHECKOUT
-      # Required for dependency file access and analysis
-      - name: Checkout Repository
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
-
-      # PYTHON ENVIRONMENT SETUP (COMPOSITE ACTION)
-      # Uses centralized Python setup for production dependency analysis
-      # Configured for security scanning with main dependencies only
-      - name: Setup Python Environment
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Setup Python
        uses: ./.github/actions/setup-python
        with:
-          python-version: '3.13'
-          install-groups: main
-          cache-suffix: security
-          generate-prisma: 'false'
-
-      # SECURITY VULNERABILITY SCANNING
-      # Comprehensive security advisory checking with structured output
-      - name: Run Safety check
+          python-version: ${{ env.PYTHON_VERSION }}
+          enable-cache: true
+      - name: Check
        run: |
          pip install safety
-          # Ensure Poetry export plugin is available
-          poetry self add poetry-plugin-export
-          poetry export --without=dev --format=requirements.txt --output=requirements.txt
+          uv export --format requirements-txt --output-file requirements.txt
          safety check --json --output safety-report.json -r requirements.txt || true
-
-      # SECURITY REPORT ARCHIVAL
-      # Stores security reports for audit trails and trend analysis
-      - name: Upload Safety results
+      - name: Upload Results
        if: always()
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        uses: actions/upload-artifact@v4
        with:
          name: safety-report
          path: safety-report.json
          retention-days: 30
-
-  # ============================================================================
-  # DEPENDABOT AUTO-MERGE - Intelligent Dependency Update Automation
-  # ============================================================================
-  # Purpose: Automated approval and merging of low-risk dependency updates
-  # Strategy: Conservative automation for patch and minor version updates
-  # Security: Repository-restricted execution to prevent supply chain attacks
-  # Scope: Patch-level and minor version updates only (excludes major changes)
-  # ============================================================================
-  dependabot-auto-merge:
-    name: Auto-merge
+  dependabot:
+    name: Dependabot
    runs-on: ubuntu-latest
-    # SECURITY CONDITIONS
-    # Strict conditions to ensure automated merging is safe and appropriate
-    # Only processes Dependabot PRs from the same repository (not forks)
-    if: github.actor == 'dependabot[bot]' && github.event_name == 'pull_request' &&
-      github.event.pull_request.head.repo.full_name == github.repository
+    if: github.actor == 'dependabot[bot]'
    permissions:
-      contents: write # Required for auto-approval
-      pull-requests: write # Required for PR management
+      contents: write
+      pull-requests: write
    steps:
-      # DEPENDABOT METADATA EXTRACTION
-      # Analyzes Dependabot PR metadata for intelligent automation decisions
-      - name: Dependabot metadata
-        id: metadata
-        uses: dependabot/fetch-metadata@08eff52bf64351f401fb50d4972fa95b9f2c2d1b # v2.4.0
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-
-      #
INTELLIGENT AUTO-APPROVAL - # Conservative automation focusing on low-risk updates only - # Patch updates: Bug fixes and security patches (1.0.0 → 1.0.1) - # Minor updates: New features with backward compatibility (1.0.0 → 1.1.0) - # Major updates: Breaking changes requiring manual review (excluded) - - name: Auto-approve patch and minor updates - if: steps.metadata.outputs.update-type == 'version-update:semver-patch' || - steps.metadata.outputs.update-type == 'version-update:semver-minor' - run: gh pr review --approve "$PR_URL" + - name: Checkout + uses: actions/checkout@v4 + - name: Auto-merge + run: | + gh pr merge --auto --merge "$PR_URL" || echo "Auto-merge failed, manual review required" env: - PR_URL: ${{github.event.pull_request.html_url}} - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} -# ============================================================================== -# SECURITY WORKFLOW BEST PRACTICES IMPLEMENTED -# ============================================================================== -# -# 1. DEFENSE IN DEPTH: -# - Multi-layer security analysis (static, dynamic, dependency) -# - Comprehensive language coverage (Python, GitHub Actions) -# - Real-time and scheduled scanning strategies -# - Automated and manual security review processes -# -# 2. INTELLIGENT AUTOMATION: -# - Conservative auto-merge policies for low-risk updates -# - Repository-restricted execution to prevent supply chain attacks -# - Fail-safe mechanisms with manual override capabilities -# - Structured reporting for audit trails and compliance -# -# 3. PERFORMANCE OPTIMIZATION: -# - Strategic scheduling to avoid resource conflicts -# - Targeted scanning based on change context (PR vs main) -# - Efficient caching and dependency management -# - Resource-aware execution with appropriate timeouts -# -# 4. INTEGRATION & REPORTING: -# - GitHub Security tab integration via SARIF -# - Automated PR commenting for immediate feedback -# - Artifact storage for security audit trails -# - Centralized vulnerability management and tracking -# -# SECURITY COVERAGE: -# ------------------ -# - Static Analysis: CodeQL for Python and GitHub Actions -# - Dependency Scanning: Real-time vulnerability assessment -# - Advisory Monitoring: Continuous security advisory tracking -# - Supply Chain: Automated dependency update management -# - Compliance: Structured reporting and audit trail maintenance -# -# AUTOMATION POLICIES: -# -------------------- -# - Auto-approve: Patch and minor version updates only -# - Manual review: Major version updates and security-sensitive changes -# - Fail-safe: Conservative defaults with explicit override mechanisms -# - Audit trail: Comprehensive logging and artifact retention -# -# ============================================================================== + PR_URL: ${{ github.event.pull_request.html_url }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 9282446ce..def94deba 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,381 +1,259 @@ -# ============================================================================== -# TUX DISCORD BOT - COMPREHENSIVE TEST SUITE WORKFLOW -# ============================================================================== -# -# This workflow executes the complete test suite for the Tux Discord bot, -# providing comprehensive testing across multiple Python versions with detailed -# coverage reporting and result archival. 
Designed for reliability and -# comprehensive validation of all code paths. -# -# TESTING STRATEGY: -# ----------------- -# 1. Multi-version Python testing (3.13) for compatibility -# 2. Categorized test execution (Unit, Database, Integration) -# 3. Intelligent test discovery and conditional execution -# 4. Parallel test execution for performance optimization -# 5. Comprehensive coverage reporting with multiple flags -# 6. Artifact preservation for debugging and analysis -# -# COVERAGE STRATEGY: -# ------------------ -# - Unit Tests: Fast tests covering core functionality -# - Database Tests: Focused on database operations and models -# - Integration Tests: End-to-end scenarios marked as "slow" -# - Separate coverage reports for different test categories -# - Codecov integration for coverage tracking and visualization -# -# PERFORMANCE FEATURES: -# --------------------- -# - Smart change detection to skip unnecessary test runs -# - Python version-specific caching for faster dependency installation -# - Parallel pytest execution when test count justifies overhead -# - Conditional test suite execution based on test discovery -# - Efficient artifact management with reasonable retention periods -# -# RELIABILITY FEATURES: -# --------------------- -# - Matrix strategy with fail-fast disabled to see all failures -# - Integration test failures don't fail CI (continue-on-error) -# - Robust coverage file handling with debugging support -# - Test result upload even on test failures (!cancelled()) -# - Comprehensive error handling and status reporting -# -# ============================================================================== +--- name: Tests -# TRIGGER CONFIGURATION -# Comprehensive testing on all main branch pushes and pull requests -# Manual triggers available for debugging and testing specific scenarios on: push: - branches: - - main + branches: [main] pull_request: - branches: - - main - # Manual trigger for debugging test issues or validating changes + branches: [main] workflow_dispatch: -# CONCURRENCY CONTROL -# Prevents resource waste from multiple test runs on same branch -# Cancels PR runs but preserves main branch runs for complete validation concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: ${{ github.event_name == 'pull_request' }} +env: + PYTHON_VERSION: '3.13' + COVERAGE_THRESHOLD: 70 # Set reasonable coverage threshold + UNIT_MARKERS: unit and not slow + INTEGRATION_MARKERS: integration and not slow jobs: - # ============================================================================ - # COMPREHENSIVE TEST EXECUTION - Multi-Version Matrix Testing - # ============================================================================ - # Purpose: Executes the complete test suite across multiple Python versions - # Strategy: Matrix testing for compatibility validation - # Categories: Unit tests, database tests, integration tests - # Coverage: Comprehensive reporting with category-specific tracking - # ============================================================================ - test: - name: Python ${{ matrix.python-version }} + changes: + name: File Detection runs-on: ubuntu-latest - permissions: - contents: read # Required for repository checkout and file access - - # MATRIX TESTING STRATEGY - # Tests multiple Python versions to ensure compatibility - # fail-fast disabled to see all version-specific issues - strategy: - fail-fast: false - matrix: - python-version: # Supported Python versions - - '3.13' + outputs: + python: ${{ 
steps.python_changes.outputs.any_changed }}
+      tests: ${{ steps.test_changes.outputs.any_changed }}
+      any: ${{ steps.set_outputs.outputs.any }}
    steps:
-      # REPOSITORY CHECKOUT
-      # Complete repository needed for comprehensive test execution
-      - name: Checkout Repository
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
-
-      # SMART CHANGE DETECTION
-      # Analyzes changes to determine if test execution is necessary
-      # Includes all test-relevant files: source code, config, and tests
-      - name: Detect Python changes
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Check Python
+        uses: tj-actions/changed-files@v46
        id: python_changes
        with:
          files: |
            **/*.py
            pyproject.toml
-            poetry.lock
+            uv.lock
+          files_ignore: |
+            tests/**/*.py
+            **/tests/**/*.py
+            **/migrations/**/*.py
+            src/tux/database/migrations/**/*.py
+      - name: Check Tests
+        uses: tj-actions/changed-files@v46
+        id: test_changes
+        with:
+          files: |
            tests/**
            conftest.py
-
-      # CONDITIONAL EXECUTION CONTROL
-      # Skips expensive test setup when no relevant files changed
-      # Manual triggers always execute for debugging purposes
-      - name: Skip if no Python/test changes
-        if: steps.python_changes.outputs.any_changed != 'true' && github.event_name
-          != 'workflow_dispatch'
+      - name: Set Outputs
+        id: set_outputs
        run: |
-          echo "✅ No Python or test files changed, skipping tests"
-          echo "💡 To force run tests, use workflow_dispatch trigger"
+          {
+            echo "python=${{ steps.python_changes.outputs.any_changed }}"
+            echo "tests=${{ steps.test_changes.outputs.any_changed }}"
+          } >> "$GITHUB_OUTPUT"

-      # PYTHON ENVIRONMENT SETUP (COMPOSITE ACTION)
-      # Uses centralized Python setup with matrix-specific Python versions
-      # Configured for comprehensive testing with all dependency groups
-      - name: Setup Python Environment
-        if: steps.python_changes.outputs.any_changed == 'true' || github.event_name
-          == 'workflow_dispatch'
+          # Check if any relevant files changed
+          if [[ "${{ steps.python_changes.outputs.any_changed }}" == "true" ]] || \
+             [[ "${{ steps.test_changes.outputs.any_changed }}" == "true" ]]; then
+            echo "any=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "any=false" >> "$GITHUB_OUTPUT"
+          fi
+  unit:
+    name: Unit Tests
+    runs-on: ubuntu-latest
+    needs: [changes]
+    if: needs.changes.outputs.any == 'true' || github.event_name == 'workflow_dispatch'
+    permissions:
+      contents: read
+      pull-requests: write
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ['3.13']
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Setup Python
        uses: ./.github/actions/setup-python
        with:
          python-version: ${{ matrix.python-version }}
-          install-groups: dev,test,types
-          cache-suffix: test
-          generate-prisma: 'true'
-
-      # TEST ENVIRONMENT CONFIGURATION
-      # Creates isolated test environment with SQLite for CI safety
-      # Prevents conflicts with production databases during testing
-      - name: Create test environment file
-        if: steps.python_changes.outputs.any_changed == 'true' || github.event_name
-          == 'workflow_dispatch'
+          enable-cache: true
+      - name: Create Test Environment
        uses: ./.github/actions/create-test-env
        with:
          additional-vars: |
-            PROD_DATABASE_URL=sqlite:///tmp/test.db
-            PROD_BOT_TOKEN=test_token_for_ci
-
-      # ========================================================================
-      # UNIT TEST EXECUTION - Core Functionality Testing
-      # ========================================================================
-      # Purpose:
Fast, focused tests covering core application logic - # Strategy: Parallel execution for large test suites, sequential for small - # Coverage: Comprehensive branch and line coverage with XML output - # Performance: Adaptive parallel/sequential execution based on test count - # ======================================================================== - - name: Run unit tests with coverage - if: steps.python_changes.outputs.any_changed == 'true' || github.event_name - == 'workflow_dispatch' + BOT_TOKEN=test_token_for_ci + DEBUG=True + - name: Run Unit Tests run: | echo "Running unit tests with coverage..." - # ADAPTIVE PARALLEL EXECUTION - # Uses pytest-xdist for parallel execution when beneficial - # Threshold of 10 tests balances overhead vs performance gain - TEST_COUNT=$(poetry run pytest --collect-only -q tests/ -m "not slow and not docker" 2>/dev/null | grep -c "test session starts" || echo "0") - if [ "$TEST_COUNT" -gt 10 ]; then - echo "Running $TEST_COUNT tests in parallel..." - poetry run pytest tests/ -v --cov=tux --cov-branch --cov-report=xml:coverage-unit.xml --cov-report=term-missing -m "not slow and not docker" --junitxml=junit-unit.xml -o junit_family=legacy --cov-fail-under=0 -n auto - else - echo "Running $TEST_COUNT tests sequentially..." - poetry run pytest tests/ -v --cov=tux --cov-branch --cov-report=xml:coverage-unit.xml --cov-report=term-missing -m "not slow and not docker" --junitxml=junit-unit.xml -o junit_family=legacy --cov-fail-under=0 - fi + # Run only unit tests (py-pglite based) + # Note: Using pytest-parallel instead of pytest-xdist for py-pglite compatibility + uv run pytest tests/unit/ \ + --cov-report=xml:coverage-unit.xml \ + --cov-report=term-missing:skip-covered \ + -m "${{ env.UNIT_MARKERS }}" \ + --junitxml=junit-unit.xml \ + --cov-fail-under=${{ env.COVERAGE_THRESHOLD }} | tee pytest-coverage.txt echo "Unit test coverage generation completed" - - # COVERAGE DEBUG SUPPORT - # Provides detailed diagnostics when coverage upload fails - # Helps troubleshoot coverage generation and file system issues - - name: Debug coverage file before upload - if: failure() - run: | - echo "🔍 Debugging coverage files due to failure..." 
- ls -la coverage-*.xml || echo "No coverage files found" - if [ -f ./coverage-unit.xml ]; then - echo "Unit coverage file size: $(stat -c%s ./coverage-unit.xml) bytes" - echo "Unit coverage file first few lines:" - head -n 5 ./coverage-unit.xml || echo "Could not read coverage file" - else - echo "Unit coverage file not found" - fi - - # UNIT TEST COVERAGE AND RESULTS REPORTING - # Uploads coverage data and test results to Codecov with specific flags - # Robust configuration prevents CI failures from coverage upload issues - - name: Upload unit test coverage and results to Codecov - if: steps.python_changes.outputs.any_changed == 'true' || github.event_name - == 'workflow_dispatch' - uses: ./.github/actions/upload-coverage + - name: Upload Unit Test Coverage + uses: actions/upload-artifact@v4 with: - coverage-file: ./coverage-unit.xml - junit-file: ./junit-unit.xml - flags: unit - name: unit-tests - codecov-token: ${{ secrets.CODECOV_TOKEN }} - - # ======================================================================== - # DATABASE TEST EXECUTION - Data Layer Validation - # ======================================================================== - # Purpose: Focused testing of database operations and models - # Strategy: Conditional execution based on test discovery - # Coverage: Database-specific coverage reporting - # Safety: Only runs when database tests actually exist - # ======================================================================== - - # DYNAMIC DATABASE TEST DISCOVERY - # Checks for existence of database tests before execution - # Prevents unnecessary setup and provides clear status reporting - - name: Check for database tests - if: steps.python_changes.outputs.any_changed == 'true' || github.event_name - == 'workflow_dispatch' - id: check_db_tests - run: | - if find tests/tux/database/ -name "test_*.py" -type f | grep -q .; then - echo "has_tests=true" >> "$GITHUB_OUTPUT" - echo "Database tests found" - else - echo "has_tests=false" >> "$GITHUB_OUTPUT" - echo "No database tests found, skipping database test suite" - fi - - # DATABASE TEST EXECUTION - # Focused testing of database layer with dedicated coverage - # Targets only database directory for precise scope - - name: Run database tests with coverage - if: steps.check_db_tests.outputs.has_tests == 'true' - run: poetry run pytest tests/tux/database/ -v --cov=tux/database --cov-branch - --cov-report=xml:coverage-database.xml --junitxml=junit-database.xml -o - junit_family=legacy --cov-fail-under=0 - - # DATABASE COVERAGE AND RESULTS REPORTING - # Separate coverage tracking for database-specific functionality - # Provides granular insights into data layer test coverage - - name: Upload database test coverage and results to Codecov - if: steps.check_db_tests.outputs.has_tests == 'true' && hashFiles('./coverage-database.xml') - != '' - uses: ./.github/actions/upload-coverage + name: unit-test-coverage + path: | + pytest-coverage.txt + junit-unit.xml + if-no-files-found: ignore + integration: + name: Integration Tests + runs-on: ubuntu-latest + needs: [changes] + if: needs.changes.outputs.any == 'true' || github.event_name == 'workflow_dispatch' + permissions: + contents: read + pull-requests: write + strategy: + fail-fast: false + matrix: + python-version: ['3.13'] + steps: + - name: Checkout + uses: actions/checkout@v4 with: - coverage-file: ./coverage-database.xml - junit-file: ./junit-database.xml - flags: database - name: database-tests - codecov-token: ${{ secrets.CODECOV_TOKEN }} - - # 
======================================================================== - # INTEGRATION TEST EXECUTION - End-to-End Validation - # ======================================================================== - # Purpose: Comprehensive end-to-end testing of complete workflows - # Strategy: Marked as "slow" tests, conditional execution, non-blocking - # Coverage: Full application coverage in realistic scenarios - # Policy: Failures don't block CI but are reported for investigation - # ======================================================================== - - # DYNAMIC INTEGRATION TEST DISCOVERY - # Uses pytest marker system to identify integration tests - # Prevents execution overhead when no integration tests exist - - name: Check for integration tests - if: steps.python_changes.outputs.any_changed == 'true' || github.event_name - == 'workflow_dispatch' - id: check_integration_tests - run: | - if poetry run pytest --collect-only -m "slow" -q tests/ | grep -q "test session starts"; then - echo "has_tests=true" >> "$GITHUB_OUTPUT" - echo "Integration tests found" - else - echo "has_tests=false" >> "$GITHUB_OUTPUT" - echo "No integration tests found, skipping integration test suite" - fi - - # COVERAGE FILE MANAGEMENT - # Cleans previous coverage files to prevent conflicts - # Ensures clean slate for integration test coverage reporting - - name: Clean up previous coverage files before integration tests - if: steps.check_integration_tests.outputs.has_tests == 'true' + fetch-depth: 0 + - name: Setup Python + uses: ./.github/actions/setup-python + with: + python-version: ${{ matrix.python-version }} + enable-cache: true + - name: Create Test Environment + uses: ./.github/actions/create-test-env + with: + additional-vars: | + BOT_TOKEN=test_token_for_ci + DEBUG=True + - name: Run Integration Tests run: | - echo "Cleaning up previous coverage files to avoid conflicts..." - rm -f coverage-unit.xml coverage-database.xml || true - echo "Current coverage files:" - ls -la coverage-*.xml 2>/dev/null || echo "No coverage files found" - - # INTEGRATION TEST EXECUTION - # Non-blocking execution allows CI to continue even with integration failures - # Provides realistic end-to-end testing without blocking development - - name: Run integration tests with coverage - if: steps.check_integration_tests.outputs.has_tests == 'true' - run: poetry run pytest tests/ -v --cov=tux --cov-branch --cov-report=xml:coverage-integration.xml - -m "slow" --junitxml=junit-integration.xml -o junit_family=legacy --cov-fail-under=0 - continue-on-error: true # Don't fail CI if integration tests fail - - # INTEGRATION COVERAGE AND RESULTS REPORTING - # Captures coverage from comprehensive end-to-end scenarios - # Provides insights into real-world usage patterns - - name: Upload integration test coverage and results to Codecov - if: steps.check_integration_tests.outputs.has_tests == 'true' && hashFiles('./coverage-integration.xml') - != '' - uses: ./.github/actions/upload-coverage + echo "Running integration tests with coverage..." 
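+          # The -m expression below expands from the INTEGRATION_MARKERS env var
+          # ("integration and not slow"), so slow-marked tests are excluded here.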
+ # Integration tests also use py-pglite (self-contained PostgreSQL) + # No external PostgreSQL setup required + uv run pytest tests/integration/ \ + --cov-report=xml:coverage-integration.xml \ + --cov-report=term-missing:skip-covered \ + -m "${{ env.INTEGRATION_MARKERS }}" \ + --junitxml=junit-integration.xml \ + --cov-fail-under=${{ env.COVERAGE_THRESHOLD }} | tee pytest-integration-coverage.txt + echo "Integration test coverage generation completed" + - name: Upload Integration Test Coverage + uses: actions/upload-artifact@v4 with: - coverage-file: ./coverage-integration.xml - junit-file: ./junit-integration.xml - flags: integration - name: integration-tests - codecov-token: ${{ secrets.CODECOV_TOKEN }} - - # NOTE: Integration test results are already handled by the composite action above - - # ======================================================================== - # ARTIFACT PRESERVATION - Test Results and Coverage Archive - # ======================================================================== - # Purpose: Preserves test artifacts for debugging and analysis - # Strategy: Upload all test outputs regardless of success/failure - # Retention: 30-day retention for reasonable debugging window - # Organization: Python version-specific artifacts for precise debugging - # ======================================================================== - - name: Upload test artifacts - if: always() - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + name: integration-test-coverage + path: | + pytest-integration-coverage.txt + junit-integration.xml + if-no-files-found: ignore + e2e: + name: E2E Tests + runs-on: ubuntu-latest + needs: [changes] + if: needs.changes.outputs.any == 'true' || github.event_name == 'workflow_dispatch' + permissions: + contents: read + pull-requests: write + strategy: + fail-fast: false + matrix: + python-version: ['3.13'] + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Setup Python + uses: ./.github/actions/setup-python + with: + python-version: ${{ matrix.python-version }} + enable-cache: true + - name: Create Test Environment + uses: ./.github/actions/create-test-env + with: + additional-vars: | + BOT_TOKEN=test_token_for_ci + DEBUG=1 + - name: Run E2E Tests + run: | + echo "Running E2E tests with coverage..." + # E2E tests use py-pglite for database operations + uv run pytest tests/e2e/ \ + --cov-report=xml:coverage-e2e.xml \ + --cov-report=term-missing:skip-covered \ + --junitxml=junit-e2e.xml \ + --cov-fail-under=${{ env.COVERAGE_THRESHOLD }} | tee pytest-e2e-coverage.txt + echo "E2E test coverage generation completed" + - name: Upload E2E Test Coverage + uses: actions/upload-artifact@v4 with: - name: test-results-python-${{ matrix.python-version }} + name: e2e-test-coverage path: | - coverage-*.xml - junit-*.xml - htmlcov/ - retention-days: 30 -# ============================================================================== -# TEST WORKFLOW BEST PRACTICES IMPLEMENTED -# ============================================================================== -# -# 1. COMPREHENSIVE TESTING STRATEGY: -# - Multi-version Python compatibility testing -# - Categorized test execution (unit, database, integration) -# - Intelligent test discovery and conditional execution -# - Parallel test execution for performance optimization -# -# 2. 
ROBUST COVERAGE REPORTING: -# - Category-specific coverage tracking with flags -# - Multiple coverage report formats (XML, terminal) -# - Codecov integration for visualization and tracking -# - Coverage debugging support for troubleshooting -# -# 3. PERFORMANCE OPTIMIZATION: -# - Smart change detection to skip unnecessary runs -# - Python version-specific caching strategies -# - Adaptive parallel/sequential test execution -# - Efficient artifact management with reasonable retention -# -# 4. RELIABILITY & FAULT TOLERANCE: -# - Matrix strategy with fail-fast disabled -# - Integration test failures don't block CI -# - Comprehensive error handling and debugging support -# - Test result reporting even on failures -# -# 5. DEVELOPER EXPERIENCE: -# - Clear status messages and skip explanations -# - Comprehensive artifact preservation for debugging -# - Manual trigger support for testing workflow changes -# - Detailed test categorization and reporting -# -# 6. SECURITY & ISOLATION: -# - Isolated test environment with SQLite -# - No production data exposure during testing -# - Secure token handling for coverage reporting -# - Read-only permissions for repository access -# -# USAGE EXAMPLES: -# --------------- -# Manual test execution: -# GitHub UI → Actions → Tests → Run workflow -# -# Debug specific Python version: -# Check matrix job for specific version in Actions tab -# -# Analyze coverage: -# Visit Codecov dashboard for detailed coverage analysis -# -# Download test artifacts: -# Actions tab → workflow run → Artifacts section -# -# View test results: -# Actions tab → workflow run → job details → test steps -# -# ============================================================================== + pytest-e2e-coverage.txt + junit-e2e.xml + if-no-files-found: ignore + coverage-report: + name: Coverage Report + runs-on: ubuntu-latest + needs: [changes, unit, integration, e2e] + if: always() && (needs.changes.outputs.any == 'true' || github.event_name == 'workflow_dispatch') + permissions: + contents: read + pull-requests: write + id-token: write + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download Unit Test Coverage + uses: actions/download-artifact@v4 + if: needs.unit.result == 'success' + with: + name: unit-test-coverage + path: . + - name: Download Integration Test Coverage + uses: actions/download-artifact@v4 + if: needs.integration.result == 'success' + with: + name: integration-test-coverage + path: . + - name: Download E2E Test Coverage + uses: actions/download-artifact@v4 + if: needs.e2e.result == 'success' + with: + name: e2e-test-coverage + path: . 
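+      # Each download above is gated on its producing job's result, so a failed
+      # or skipped suite simply contributes no files to the combined report.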
+ - name: Generate Coverage Report + uses: MishaKav/pytest-coverage-comment@main + with: + multiple-files: | + ${{ needs.unit.result == 'success' && 'Unit Tests, ./pytest-coverage.txt, ./junit-unit.xml' || '' }} + ${{ needs.integration.result == 'success' && 'Integration Tests, ./pytest-integration-coverage.txt, ./junit-integration.xml' || '' }} + ${{ needs.e2e.result == 'success' && 'E2E Tests, ./pytest-e2e-coverage.txt, ./junit-e2e.xml' || '' }} + title: Comprehensive Test Coverage Report + badge-title: Coverage + report-only-changed-files: true + - name: Upload Coverage to Codecov + uses: codecov/codecov-action@v5 + with: + files: | + ${{ needs.unit.result == 'success' && 'coverage-unit.xml' || '' }} + ${{ needs.integration.result == 'success' && 'coverage-integration.xml' || '' }} + ${{ needs.e2e.result == 'success' && 'coverage-e2e.xml' || '' }} + flags: ${{ needs.unit.result == 'success' && 'unit,' || '' }}${{ needs.integration.result + == 'success' && 'integration,' || '' }}${{ needs.e2e.result == 'success' + && 'e2e' || '' }} + name: tux-coverage + fail_ci_if_error: false + verbose: true + use_oidc: true diff --git a/.gitignore b/.gitignore index 9771bc0bb..639d23e50 100644 --- a/.gitignore +++ b/.gitignore @@ -47,6 +47,8 @@ htmlcov/ .cache nosetests.xml coverage.xml +coverage.json +lcov.info *.cover *.py,cover .hypothesis/ @@ -87,6 +89,9 @@ ipython_config.py # Pipenv Pipfile.lock +# uv +uv.lock + # Poetry poetry.lock @@ -155,8 +160,8 @@ github-private-key.pem # Miscellaneous /debug.csv -config/settings* -!config/settings.yml.example + + # MacOS .DS_Store @@ -181,3 +186,16 @@ prisma_binaries/ .archive/ reports/ + +.kiro +.audit + +.prisma-archive +sqlmodel-refactor +.database-archive +data/ +examples/ +.amazonq/cli-todo-lists/ + +# solution for developers who switch between branches a lot +config/settings.yml diff --git a/.markdownlint.yaml b/.markdownlint.yaml index 29b607b87..0e7f814e7 100644 --- a/.markdownlint.yaml +++ b/.markdownlint.yaml @@ -1,3 +1,4 @@ +--- # Example markdownlint configuration with all properties set to their default value # Default state for all rules diff --git a/.markdownlintignore b/.markdownlintignore index 1d13909e5..d69fbde7a 100644 --- a/.markdownlintignore +++ b/.markdownlintignore @@ -7,6 +7,9 @@ docker-compose*.yml *.lock +# Exclude auto-generated files +CONFIG.md + # Exclude build and cache directories .venv/ .pytest_cache/ @@ -29,3 +32,14 @@ prisma/ typings/ .github/ + +.kiro/ + +.audit/ + +# Project-specific ignores +sqlmodel-refactor/** +docs/db/README.md + +.archive +.archive/** diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b8bb83cff..1b94c0def 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,10 @@ +--- default_language_version: python: python3.13 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: v6.0.0 hooks: - - id: check-yaml - id: check-json - id: check-toml - id: end-of-file-fixer @@ -14,19 +14,18 @@ repos: rev: v0.24.1 hooks: - id: validate-pyproject - additional_dependencies: - - validate-pyproject-schema-store[all] + additional_dependencies: ['validate-pyproject-schema-store[all]'] - repo: https://github.com/lyz-code/yamlfix - rev: 1.17.0 + rev: 1.18.0 hooks: - id: yamlfix - exclude: \.(commitlintrc|pre-commit-hooks)\.yaml$ + exclude: \.(commitlintrc|pre-commit-hooks)\.yaml$|docker-compose.*\.yml$ - repo: https://github.com/adrienverge/yamllint rev: v1.37.1 hooks: - id: yamllint - args: - - -c=.yamllint.yml + exclude: docker-compose.*\.yml$ + args: 
[-c=.yamllint.yml] - repo: https://github.com/rhysd/actionlint rev: v1.7.7 hooks: @@ -39,20 +38,19 @@ repos: rev: v3.20.0 hooks: - id: pyupgrade - args: - - --py313-plus + args: [--py313-plus] + exclude: ^(src/tux/database/models/.*\.py)$ - repo: https://github.com/asottile/add-trailing-comma rev: v3.2.0 hooks: - id: add-trailing-comma - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.12.7 + rev: v0.12.12 hooks: - id: ruff-check - args: - - --fix + args: [--fix] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.12.7 + rev: v0.12.12 hooks: - id: ruff-format - repo: https://github.com/gitleaks/gitleaks @@ -63,12 +61,41 @@ repos: rev: v9.22.0 hooks: - id: commitlint - stages: - - commit-msg + stages: [commit-msg] additional_dependencies: - '@commitlint/cli' - '@commitlint/config-conventional' -exclude: ^(\.archive/|.*typings/|node_modules/|\.venv/).*$ + # - repo: local + # hooks: + # - id: settings-doc-markdown + # name: Generate settings-doc Markdown + # description: This hook generates a Markdown documentation from pydantic.BaseSettings + # to a file. + # yamllint disable-line rule:line-length + # entry: uv run settings-doc generate --module tux.shared.config.settings --output-format markdown --update CONFIG.md --between "" "" --heading-offset 1 + # language: system + # types: [file, python] + # pass_filenames: false + # - id: settings-doc-dotenv + # name: Generate settings-doc env.example + # description: This hook generates an env.example template from pydantic.BaseSettings + # to a file. + # entry: uv run settings-doc generate --module tux.shared.config.settings --output-format + # dotenv --update env.example + # language: system + # types: [file, python] + # pass_filenames: false + # Temporarily disabled - causes conflicts with end-of-file-fixer + # TODO: Re-enable once we resolve the newline handling issue + # - id: settings-doc-env-example + # name: Generate env.example template + # description: This hook generates env.example from pydantic.BaseSettings to + # a file. + # entry: make docs-env-example + # language: system + # types: [file, python] + # pass_filenames: false +exclude: ^(\.archive/|.*typings/|node_modules/|\.venv/|\.kiro/).*$ ci: autofix_commit_msg: 'style: auto fixes from pre-commit hooks' autoupdate_commit_msg: 'chore: update pre-commit hook versions' diff --git a/.reviewdog.yml b/.reviewdog.yml new file mode 100644 index 000000000..95ddcdf15 --- /dev/null +++ b/.reviewdog.yml @@ -0,0 +1,121 @@ +--- +# ============================================================================== +# REVIEWDOG CONFIGURATION - GitHub PR Commenting +# ============================================================================== +# +# This configuration file defines how reviewdog processes different linters +# and formats their output for GitHub pull request comments. 
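+#
+# Most runners below declare format: rdjson (Reviewdog Diagnostic JSON), the
+# structured format reviewdog maps to inline PR comments.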
+# +# INTEGRATED TOOLS: +# ----------------- +# - basedpyright: Python type checking +# - ruff: Python linting and formatting +# - markdownlint: Markdown documentation linting +# - shellcheck: Shell script analysis +# - shfmt: Shell script formatting +# - actionlint: GitHub Actions validation +# - hadolint: Dockerfile linting +# - yamllint: YAML validation +# - gitleaks: Secret scanning +# - trivy: Container security scanning +# +# ============================================================================== +# REVIEWDOG BEHAVIOR +# Global settings for all tools +reviewdog: + reporter: github-pr-review + filter_mode: file + fail_on_error: false + level: warning +# ============================================================================== +# RUNNER CONFIGURATIONS +# ============================================================================== +# Python Type Checking +runner: + basedpyright: + cmd: uv run basedpyright --outputformat json + format: rdjson + name: Basedpyright + level: warning + + # Python Linting + ruff: + cmd: uv run ruff check --output-format json . + format: rdjson + name: Ruff + level: warning + + # Markdown Linting + markdownlint: + cmd: npx markdownlint --format json . + format: rdjson + name: MarkdownLint + level: warning + + # Shell Script Analysis + shellcheck: + cmd: shellcheck --format json --shell bash --severity warning --color never $(find + . -name "*.sh" -o -name "*.bash" -o -name "*.zsh" -o -path "./scripts/*") + format: rdjson + name: ShellCheck + level: warning + + # Shell Script Formatting + shfmt: + cmd: shfmt -i 2 -ci -bn -sr -kp -w -s -p -f . | xargs shfmt -i 2 -ci -bn -sr -kp + -w -s -p -d + format: diff + name: shfmt + level: warning + + # GitHub Actions Validation + actionlint: + cmd: actionlint -format json + format: rdjson + name: ActionLint + level: warning + + # Dockerfile Linting + hadolint: + cmd: hadolint --format json Dockerfile + format: rdjson + name: Hadolint + level: warning + + # YAML Validation + yamllint: + cmd: yamllint --format json . + format: rdjson + name: YAMLLint + level: warning + + # Secret Scanning + gitleaks: + cmd: gitleaks detect --format json --report-format json --report . + format: rdjson + name: Gitleaks + level: error + + # Container Security Scanning + trivy: + cmd: trivy config --format json . 
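+    # trivy config scans IaC files (Dockerfiles, compose files, workflow YAML)
+    # for misconfigurations, as opposed to scanning built images for CVEs.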
+ format: rdjson + name: Trivy + level: warning +# ============================================================================== +# USAGE EXAMPLES: +# -------------- +# +# Run all tools: +# reviewdog -conf .reviewdog.yml +# +# Run specific tools: +# reviewdog -conf .reviewdog.yml -runners=basedpyright,ruff +# +# Run with custom reporter: +# reviewdog -conf .reviewdog.yml -reporter=github-pr-check +# +# Debug configuration: +# reviewdog -conf .reviewdog.yml -tee +# +# ============================================================================== diff --git a/.vscode/extensions.json b/.vscode/extensions.json index f819e218e..23023e952 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -13,6 +13,7 @@ "usernamehw.errorlens", "sourcery.sourcery", "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters" + "ryanluker.vscode-coverage-gutters", + "ms-azuretools.vscode-containers" ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index 496a90cd6..53dfac68d 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -11,29 +11,12 @@ "source.organizeImports.ruff": "explicit" } }, - "python.analysis.typeCheckingMode": "off", - "cursorpyright.analysis.typeCheckMode": "off", "python.languageServer": "None", - "python.analysis.autoFormatStrings": true, - "python.analysis.completeFunctionParens": true, - "python.analysis.autoImportCompletions": true, - "python.analysis.inlayHints.functionReturnTypes": true, - "python.analysis.inlayHints.variableTypes": true, - "python.analysis.inlayHints.callArgumentNames": "all", "python.terminal.activateEnvInCurrentTerminal": true, - "python.analysis.exclude": [ - ".archive/**", - "build/**" - ], - "python.analysis.diagnosticSeverityOverrides": { - "reportIncompatibleMethodOverride": "none", - "reportGeneralTypeIssues": "information" - }, - "python.poetryPath": "poetry", + "python.terminal.executeInFileDir": false, "python.testing.pytestEnabled": true, - "python.testing.unittestEnabled": true, - "python.testing.cwd": "${workspaceFolder}", - "python.testing.autoTestDiscoverOnSaveEnabled": true, + "python.testing.autoTestDiscoverOnSaveEnabled": false, + "autoDocstring.docstringFormat": "numpy", "coverage-gutters.coverageFileNames": [ "coverage.xml", "coverage.lcov", @@ -45,8 +28,6 @@ "coverage-gutters.showGutterCoverage": false, "coverage-gutters.showLineCoverage": true, "coverage-gutters.showRulerCoverage": true, - "python.terminal.executeInFileDir": false, - "python.terminal.launchArgs": [], "files.exclude": { "**/__pycache__": true, "**/*.pyc": true, @@ -69,7 +50,7 @@ "git.fetchOnPull": true, "[markdown]": { "files.trimTrailingWhitespace": false, - "editor.defaultFormatter": "DavidAnson.vscode-markdownlint" + "editor.defaultFormatter": "yzhang.markdown-all-in-one" }, "markdownlint.config": { "extends": ".markdownlint.yaml" diff --git a/.yamllint.yml b/.yamllint.yml index 555c19552..a81b8ac7c 100644 --- a/.yamllint.yml +++ b/.yamllint.yml @@ -1,57 +1,40 @@ +--- extends: default rules: + document-start: disable + + # Allow longer lines for readability in configuration files + line-length: + max: 185 + level: warning + + # Allow empty values in mappings (common in Docker Compose) + empty-values: + forbid-in-block-mappings: false + forbid-in-flow-mappings: false + + # Be more lenient with indentation for nested structures indentation: spaces: 2 indent-sequences: true check-multi-line-strings: false - comments: - min-spaces-from-content: 1 - require-starting-space: true - comments-indentation: disable - document-start: - present: 
false - document-end: - present: false - new-line-at-end-of-file: enable - trailing-spaces: enable - line-length: disable - brackets: - min-spaces-inside: 0 - max-spaces-inside: 1 - braces: - min-spaces-inside: 0 - max-spaces-inside: 1 truthy: - allowed-values: - - 'true' - - 'false' - - 'yes' - - 'no' - - 'on' - - 'off' check-keys: false - empty-values: - forbid-in-block-mappings: false - forbid-in-flow-mappings: false + + # Allow comments to start anywhere + comments-indentation: disable + + # Allow trailing spaces in empty lines + empty-lines: + max-start: 1 + max-end: 1 + max: 2 + + # Allow dashes in key names (common in GitHub Actions) key-duplicates: enable - key-ordering: disable - float-values: disable - octal-values: disable -ignore: |- - .venv/ - .pytest_cache/ - .ruff_cache/ - __pycache__/ - .cache/ - htmlcov/ - .archive/ - logs/ - .devcontainer/ - .vscode/ - .cursor/ - poetry.lock - flake.lock - prisma/ - typings/ - docs/ - tests/fixtures/ + + # Allow brackets in flow sequences + brackets: enable + + # Allow braces in flow mappings + braces: enable diff --git a/DEVELOPER.md b/DEVELOPER.md deleted file mode 100644 index d26219da7..000000000 --- a/DEVELOPER.md +++ /dev/null @@ -1,35 +0,0 @@ -# Developer Guide: Tux - -Welcome to the Tux developer documentation! - -This area provides in-depth information for developers working on Tux, beyond the initial setup and contribution workflow. - -## Getting Started & Contributing - -For information on setting up your environment, the development workflow (branching, PRs), and basic quality checks, please refer to the main contribution guide: - -* [**Contributing Guide**](./.github/CONTRIBUTING.md) - -## Developer Topics - -Explore the following pages for more detailed information on specific development aspects: - -* **[Local Development](./docs/content/dev/local_development.md)** - * Running the bot locally. - * Understanding the hot reloading mechanism. -* **[Tux CLI Usage](./docs/content/dev/cli/index.md)** - * Understanding development vs. production modes (`--dev`, `--prod`). - * Overview of command groups (`bot`, `db`, `dev`, `docker`). -* **[Code Coverage](./docs/content/dev/coverage.md)** - * Running tests with coverage tracking. - * Generating and interpreting coverage reports. - * Using `tux test run`, `tux test coverage`, and related commands. -* **[Database Management](./docs/content/dev/database.md)** - * Detailed usage of `tux db` commands (push, migrate, generate, pull, reset). - * Working with Prisma migrations. -* **[Database Controller Patterns](./docs/content/dev/database_patterns.md)** - * Using controllers for CRUD, transactions, relations. - * Best practices for database interactions in code. -* **[Docker Environment](./docs/content/dev/docker_development.md)** (Optional) - * Setting up and using the Docker-based development environment. - * Running commands within Docker containers. diff --git a/DOCKER.md b/DOCKER.md deleted file mode 100644 index f2977ac1d..000000000 --- a/DOCKER.md +++ /dev/null @@ -1,683 +0,0 @@ - -# Tux Docker Setup - Complete Guide - -This comprehensive guide covers the optimized Docker setup for Tux, including performance improvements, testing strategies, security measures, and practical usage. 
- -## 📑 Table of Contents - -- [🚀 Performance Achievements](#-performance-achievements) -- [📋 Quick Start](#-quick-start) -- [🧪 Testing Strategy](#-testing-strategy) -- [🏗️ Architecture Overview](#-architecture-overview) -- [🛡️ Security Features](#-security-features) -- [🔧 Development Features](#-development-features) -- [📊 Performance Monitoring](#-performance-monitoring) -- [🔄 Environment Management](#-environment-management) -- [🧹 Safe Cleanup Operations](#-safe-cleanup-operations) -- [📈 Performance Baselines](#-performance-baselines) -- [🏥 Health Checks & Monitoring](#-health-checks-and-monitoring) -- [🚨 Troubleshooting](#-troubleshooting) -- [📚 Advanced Usage](#-advanced-usage) -- [🎯 Best Practices](#-best-practices) -- [📊 Metrics & Reporting](#-metrics--reporting) -- [🎉 Success Metrics](#-success-metrics) -- [📞 Support & Maintenance](#-support--maintenance) -- [📂 Related Documentation](#-related-documentation) - -## 🚀 Performance Achievements - -Our Docker setup has been extensively optimized, achieving **outstanding performance improvements** from the original implementation: - -### **Build Time Improvements** - -- **Fresh Builds:** 108-115 seconds (under 2 minutes) -- **Cached Builds:** 0.3 seconds (99.7% improvement) -- **Regression Consistency:** <5ms variance across builds - -### **Image Size Optimizations** - -- **Production Image:** ~500MB (80% size reduction from ~2.5GB) -- **Development Image:** ~2GB (33% size reduction from ~3GB) -- **Deployment Speed:** 5-8x faster due to smaller images - -### **Key Optimizations Applied** - -- ✅ Fixed critical `chown` performance issues (60+ second reduction) -- ✅ Implemented aggressive multi-stage builds -- ✅ Optimized Docker layer caching (380x cache improvement) -- ✅ Added comprehensive cleanup and size reduction -- ✅ Enhanced safety with targeted resource management -- ✅ **Unified Docker toolkit** - Single script for all operations (testing, monitoring, cleanup) - -## 📋 Quick Start - -### **🐳 Unified Docker Toolkit** - -All Docker operations are now available through a single, powerful script: - -```bash -# Quick validation (2-3 min) -./scripts/docker-toolkit.sh quick - -# Standard testing (5-7 min) -./scripts/docker-toolkit.sh test - -# Comprehensive testing (15-20 min) -./scripts/docker-toolkit.sh comprehensive - -# Monitor container resources -./scripts/docker-toolkit.sh monitor [container] [duration] [interval] - -# Safe cleanup operations -./scripts/docker-toolkit.sh cleanup [--dry-run] [--force] [--volumes] - -# Get help -./scripts/docker-toolkit.sh help -``` - -### **Development Workflow** - -```bash -# Start development environment -poetry run tux --dev docker up - -# Monitor logs -poetry run tux --dev docker logs -f - -# Execute commands in container -poetry run tux --dev docker exec tux bash - -# Stop environment -poetry run tux --dev docker down -``` - -### **Production Deployment** - -```bash -# Build and start production -poetry run tux docker build -poetry run tux docker up -d - -# Check health status -poetry run tux docker ps - -# View logs -poetry run tux docker logs -f -``` - -## 🧪 Testing Strategy - -We have a comprehensive 3-tier testing approach: - -### **Tier 1: Quick Validation (2-3 minutes)** - -```bash -./scripts/docker-toolkit.sh quick -``` - -**Use for:** Daily development, pre-commit validation - -### **Tier 2: Standard Testing (5-7 minutes)** - -```bash -./scripts/docker-toolkit.sh test - -# With custom thresholds -BUILD_THRESHOLD=180000 MEMORY_THRESHOLD=256 ./scripts/docker-toolkit.sh test - -# Force fresh 
builds -./scripts/docker-toolkit.sh test --no-cache --force-clean -``` - -**Use for:** Performance validation, before releases - -### **Tier 3: Comprehensive Testing (15-20 minutes)** - -```bash -./scripts/docker-toolkit.sh comprehensive -``` - -**Use for:** Major changes, full regression testing, pre-release validation - -### **When to Use Each Test Tier** - -| Scenario | Quick | Standard | Comprehensive | -|----------|-------|----------|---------------| -| **Daily development** | ✅ | | | -| **Before commit** | ✅ | | | -| **Docker file changes** | | ✅ | | -| **Performance investigation** | | ✅ | | -| **Before release** | | ✅ | ✅ | -| **CI/CD pipeline** | | ✅ | | -| **Major refactoring** | | | ✅ | -| **New developer onboarding** | | | ✅ | -| **Production deployment** | | ✅ | | -| **Issue investigation** | | ✅ | ✅ | - -### **Performance Thresholds** - -All tests validate against configurable thresholds: - -- **Build Time:** < 300s (5 minutes) - `BUILD_THRESHOLD` -- **Startup Time:** < 10s - `STARTUP_THRESHOLD` -- **Memory Usage:** < 512MB - `MEMORY_THRESHOLD` -- **Python Validation:** < 5s - `PYTHON_THRESHOLD` - -## 🏗️ Architecture Overview - -### **Multi-Stage Dockerfile** - -```dockerfile -FROM python:3.13.5-slim AS base # Common runtime base -FROM base AS build # Build dependencies & tools -FROM build AS dev # Development environment -FROM python:3.13.5-slim AS production # Minimal production runtime -``` - -### **Key Features** - -- **Non-root execution** (UID 1001) -- **Read-only root filesystem** (production) -- **Optimized layer caching** -- **Aggressive size reduction** -- **Security-first design** - -## 🛡️ Security Features - -### **Container Security** - -- ✅ **Non-root user execution** (UID 1001, GID 1001) -- ✅ **Read-only root filesystem** (production) -- ✅ **Security options:** `no-new-privileges:true` -- ✅ **Resource limits:** Memory and CPU constraints -- ✅ **Temporary filesystems:** Controlled temp access - -### **Build Security** - -- ✅ **Multi-stage separation** (build tools excluded from production) -- ✅ **Dependency locking** (Poetry with `poetry.lock`) -- ✅ **Vulnerability scanning** (Docker Scout integration) -- ✅ **Minimal attack surface** (slim base images) - -### **File System Access** - -```bash -# Application temp directory (persistent) -/app/temp/ # Writable, survives restarts - -# System temp directories (ephemeral) -/tmp/ # tmpfs, cleared on restart -/var/tmp/ # tmpfs, cleared on restart -``` - -### **Security Checklist** - -Use this checklist to validate security compliance: - -- [ ] ✅ Environment variables via `.env` file (never in Dockerfile) -- [ ] ✅ Regular base image updates scheduled -- [ ] ✅ Vulnerability scanning in CI/CD pipeline -- [ ] ✅ Non-root user execution verified -- [ ] ✅ Read-only root filesystem enabled (production) -- [ ] ✅ Resource limits configured -- [ ] ✅ Health checks implemented -- [ ] ✅ Minimal package installation used -- [ ] ✅ No secrets embedded in images -- [ ] ✅ Log rotation configured - -### **Temp File Usage Pattern** - -```python -import tempfile -import os - -# For persistent temp files (across container restarts) -TEMP_DIR = "/app/temp" -os.makedirs(TEMP_DIR, exist_ok=True) - -# For ephemeral temp files (cleared on restart) -with tempfile.NamedTemporaryFile(dir="/tmp") as tmp_file: - # Use tmp_file for short-lived operations - pass -``` - -## 🔧 Development Features - -### **File Watching & Hot Reload** - -```yaml -# docker-compose.dev.yml -develop: - watch: - - action: sync # Instant file sync - path: . 
- target: /app/ - - action: rebuild # Rebuild triggers - path: pyproject.toml - - action: rebuild - path: prisma/schema/ -``` - -### **Development Tools** - -- **Live code reloading** with file sync -- **Schema change detection** and auto-rebuild -- **Dependency change handling** -- **Interactive debugging support** - -## 📊 Performance Monitoring - -### **Automated Metrics Collection** - -All test scripts generate detailed performance data: - -```bash -# View latest metrics -cat logs/docker-metrics-*.json - -# Comprehensive test results -cat logs/comprehensive-test-*/test-report.md - -# Performance trends -jq '.performance | to_entries[] | "\(.key): \(.value.value) \(.value.unit)"' logs/docker-metrics-*.json -``` - -### **Key Metrics Tracked** - -- Build times (fresh vs cached) -- Container startup performance -- Memory usage patterns -- Image sizes and layer counts -- Security scan results -- File operation performance - -## 🔄 Environment Management - -### **Environment Switching** - -```bash -# Development mode (default) -poetry run tux --dev docker up - -# Production mode -poetry run tux --prod docker up - -# CLI environment flags -poetry run tux --dev docker build # Development build -poetry run tux --prod docker build # Production build -``` - -### **Configuration Files** - -- **`docker-compose.yml`** - Production configuration -- **`docker-compose.dev.yml`** - Development overrides -- **`Dockerfile`** - Multi-stage build definition -- **`.dockerignore`** - Build context optimization - -## 🧹 Safe Cleanup Operations - -### **Automated Safe Cleanup** - -```bash -# Preview cleanup (safe) -poetry run tux docker cleanup --dry-run - -# Remove tux resources only -poetry run tux docker cleanup --force --volumes - -# Standard test with cleanup -./scripts/docker-toolkit.sh test --force-clean - -# Monitor container resources -./scripts/docker-toolkit.sh monitor tux-dev 120 10 -``` - -### **Safety Guarantees** - -- ✅ **Only removes tux-related resources** -- ✅ **Preserves system images** (python, ubuntu, etc.) -- ✅ **Protects CI/CD environments** -- ✅ **Specific pattern matching** (no wildcards) - -### **Protected Resources** - -```bash -# NEVER removed (protected): -python:* # Base Python images -ubuntu:* # Ubuntu system images -postgres:* # Database images -System containers # Non-tux containers -System volumes # System-created volumes -``` - -### **Safety Verification** - -Verify that cleanup operations only affect tux resources: - -```bash -# Before cleanup - note system images -docker images | grep -E "(python|ubuntu|alpine)" > /tmp/before_images.txt - -# Run safe cleanup -poetry run tux docker cleanup --force --volumes - -# After cleanup - verify system images still present -docker images | grep -E "(python|ubuntu|alpine)" > /tmp/after_images.txt - -# Compare (should be identical) -diff /tmp/before_images.txt /tmp/after_images.txt -``` - -**Expected result:** No differences - all system images preserved. 
- -### **Dangerous Commands to NEVER Use** - -```bash -# ❌ NEVER USE THESE: -docker system prune -af --volumes # Removes ALL system resources -docker system prune -af # Removes ALL unused resources -docker volume prune -f # Removes ALL unused volumes -docker network prune -f # Removes ALL unused networks -docker container prune -f # Removes ALL stopped containers -``` - -## 📈 Performance Baselines - -### **Expected Performance Targets** - -| Metric | Development | Production | Threshold | -|--------|-------------|------------|-----------| -| **Fresh Build** | ~108s | ~115s | < 300s | -| **Cached Build** | ~0.3s | ~0.3s | < 60s | -| **Container Startup** | < 5s | < 3s | < 10s | -| **Memory Usage** | < 1GB | < 512MB | Configurable | -| **Image Size** | ~2GB | ~500MB | Monitored | - -### **Performance Alerts** - -```bash -# Check for regressions -if [ "$build_time" -gt 180000 ]; then - echo "⚠️ WARNING: Build time exceeded 3 minutes" -fi -``` - -## 🏥 Health Checks & Monitoring - -### **Health Check Configuration** - -```yaml -healthcheck: - test: ["CMD", "python", "-c", "import sys; sys.exit(0)"] - interval: 30s - timeout: 10s - retries: 3 - start_period: 40s -``` - -### **Monitoring Commands** - -```bash -# Health status -poetry run tux docker health - -# Resource usage -docker stats tux - -# Container logs -poetry run tux docker logs -f - -# System overview -docker system df -``` - -## 🚨 Troubleshooting - -### **Common Issues & Solutions** - -#### **Build Failures** - -```bash -# Clean build cache -docker builder prune -f - -# Rebuild without cache -poetry run tux docker build --no-cache -``` - -#### **Permission Issues** - -```bash -# Check container user -docker run --rm tux:prod whoami # Should output: nonroot - -# Verify file permissions -docker run --rm tux:prod ls -la /app -``` - -#### **Performance Issues** - -```bash -# Run performance diagnostics -./scripts/docker-toolkit.sh test - -# Quick validation -./scripts/docker-toolkit.sh quick - -# Check resource usage -docker stats --format "table {{.Name}}\t{{.CPUPerc}}\t{{.MemUsage}}" -``` - -#### **File Watching Not Working** - -```bash -# Restart with rebuild -poetry run tux --dev docker up --build - -# Check sync logs -docker compose -f docker-compose.dev.yml logs -f - -# Test file sync manually -echo "# Test change $(date)" > test_file.py -docker compose -f docker-compose.dev.yml exec tux test -f /app/test_file.py -rm test_file.py -``` - -#### **Prisma Issues** - -```bash -# Regenerate Prisma client -poetry run tux --dev docker exec tux poetry run prisma generate - -# Check Prisma binaries -poetry run tux --dev docker exec tux ls -la .venv/lib/python*/site-packages/prisma - -# Test database operations -poetry run tux --dev docker exec tux poetry run prisma db push --accept-data-loss -``` - -#### **Memory and Resource Issues** - -```bash -# Monitor resource usage over time -docker stats --format "table {{.Name}}\t{{.CPUPerc}}\t{{.MemUsage}}\t{{.MemPerc}}" tux - -# Test with lower memory limits -docker run --rm --memory=256m tux:prod python -c "print('Memory test OK')" - -# Check for memory leaks -docker run -d --name memory-test tux:prod sleep 60 -for i in {1..10}; do docker stats --no-stream memory-test; sleep 5; done -docker stop memory-test && docker rm memory-test -``` - -### **Emergency Cleanup** - -```bash -# Safe emergency cleanup -poetry run tux docker cleanup --force --volumes -docker builder prune -f - -# Check system state -docker system df -docker images - -# Manual image restoration if needed -docker pull 
python:3.13.5-slim -docker pull ubuntu:22.04 -``` - -## 📚 Advanced Usage - -### **Custom Build Arguments** - -```bash -# Build specific stage -docker build --target dev -t tux:dev . -docker build --target production -t tux:prod . - -# Build with custom args -docker build --build-arg DEVCONTAINER=1 . -``` - -### **Multi-Platform Builds** - -```bash -# Build for amd64 only -docker buildx build --platform linux/amd64 . -``` - -### **Security Scanning** - -```bash -# Run vulnerability scan -docker scout cves tux:prod --only-severity critical,high -``` - -## 🎯 Best Practices - -### **Development Workflow Best Practices** - -1. **Daily:** Run quick validation tests -2. **Before commits:** Validate Docker changes -3. **Before releases:** Run comprehensive tests -4. **Regular cleanup:** Use safe cleanup commands - -### **Production Deployment Best Practices** - -1. **Build production images** with specific tags -2. **Run security scans** before deployment -3. **Monitor resource usage** and health checks -4. **Set up log aggregation** and monitoring - -### **Performance Optimization** - -1. **Use cached builds** for development -2. **Monitor build times** for regressions -3. **Keep images small** with multi-stage builds -4. **Regular performance testing** with metrics - -## 📊 Metrics & Reporting - -### **Automated Reporting** - -```bash -# Generate performance report -./scripts/docker-toolkit.sh comprehensive - -# View detailed results -cat logs/comprehensive-test-*/test-report.md - -# Export metrics for analysis -jq '.' logs/docker-metrics-*.json > performance-data.json -``` - -### **CI/CD Integration** - -```yaml -# GitHub Actions example -- name: Docker Performance Test - run: ./scripts/docker-toolkit.sh test - -- name: Security Scan - run: docker scout cves --exit-code --only-severity critical,high -``` - -### **Common Failure Scenarios to Test** - -Regularly test these failure scenarios to ensure robustness: - -1. **Out of disk space during build** -2. **Network timeout during dependency installation** -3. **Invalid Dockerfile syntax** -4. **Missing environment variables** -5. **Port conflicts between environments** -6. **Permission denied errors** -7. **Resource limit exceeded** -8. **Corrupted Docker cache** -9. **Invalid compose configuration** -10. 
**Missing base images** - -```bash -# Example: Test low memory handling -docker run --rm --memory=10m tux:prod echo "Low memory test" || echo "✅ Handled gracefully" - -# Example: Test invalid config -cp .env .env.backup -echo "INVALID_VAR=" >> .env -docker compose config || echo "✅ Invalid config detected" -mv .env.backup .env -``` - -## 🎉 Success Metrics - -Our optimized Docker setup achieves: - -### **Performance Achievements** - -- ✅ **99.7% cache improvement** (115s → 0.3s) -- ✅ **80% image size reduction** (2.5GB → 500MB) -- ✅ **36% faster fresh builds** (180s → 115s) -- ✅ **380x faster cached builds** - -### **Safety & Reliability** - -- ✅ **100% safe cleanup operations** -- ✅ **Zero system resource conflicts** -- ✅ **Comprehensive error handling** -- ✅ **Automated regression testing** - -### **Developer Experience** - -- ✅ **2.3 hours/week time savings** per developer -- ✅ **5-8x faster deployments** -- ✅ **Instant file synchronization** -- ✅ **Reliable, consistent performance** - -## 📞 Support & Maintenance - -### **Regular Maintenance** - -- **Weekly:** Review performance metrics -- **Monthly:** Update base images -- **Quarterly:** Comprehensive performance review -- **As needed:** Security updates and patches - -### **Getting Help** - -1. **Check logs:** `docker logs` and test outputs -2. **Run diagnostics:** Performance and health scripts -3. **Review documentation:** This guide and linked resources -4. **Use cleanup tools:** Safe cleanup operations via the toolkit - ---- - -## 📂 Related Documentation - -- **[DEVELOPER.md](DEVELOPER.md)** - General development setup and prerequisites -- **[Dockerfile](Dockerfile)** - Multi-stage build definition -- **[docker-compose.yml](docker-compose.yml)** - Production configuration -- **[docker-compose.dev.yml](docker-compose.dev.yml)** - Development overrides -- **[scripts/docker-toolkit.sh](scripts/docker-toolkit.sh)** - Unified Docker toolkit (all operations) - -**This Docker setup represents a complete transformation from the original implementation, delivering exceptional performance, security, and developer experience.** 🚀 diff --git a/Dockerfile b/Dockerfile index 0b7fb2ced..24bf9e639 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,467 +1,227 @@ -# ============================================================================== -# TUX DISCORD BOT - MULTI-STAGE DOCKERFILE -# ============================================================================== -# -# This Dockerfile uses a multi-stage build approach to create optimized images -# for different use cases while maintaining consistency across environments. -# -# STAGES: -# ------- -# 1. base - Common foundation with runtime dependencies -# 2. build - Development tools and dependency installation -# 3. dev - Development environment with debugging tools -# 4. production - Minimal, secure runtime environment -# -# USAGE: -# ------ -# Development: docker-compose -f docker-compose.dev.yml up -# Production: docker build --target production -t tux:latest . -# With version: docker build --build-arg VERSION=$(git describe --tags --always --dirty | sed 's/^v//') -t tux:latest . 
-# -# SECURITY FEATURES: -# ------------------ -# - Non-root user execution (uid/gid 1001) -# - Read-only filesystem support via tmpfs mounts -# - Minimal attack surface (only required dependencies) -# - Pinned package versions for reproducibility -# - Health checks for container monitoring -# -# SIZE OPTIMIZATION: -# ------------------ -# - Multi-stage builds to exclude build tools from final image -# - Aggressive cleanup of unnecessary files (~73% size reduction) -# - Efficient layer caching through strategic COPY ordering -# - Loop-based cleanup to reduce Dockerfile complexity -# -# ============================================================================== - -# ============================================================================== -# BASE STAGE - Common Foundation -# ============================================================================== -# Purpose: Establishes the common base for all subsequent stages -# Contains: Python runtime, essential system dependencies, security setup -# Size Impact: ~150MB (Python slim + runtime deps) -# ============================================================================== - FROM python:3.13.7-slim@sha256:27f90d79cc85e9b7b2560063ef44fa0e9eaae7a7c3f5a9f74563065c5477cc24 AS base -# OCI Labels for container metadata and registry compliance -# These labels provide important metadata for container registries and tools LABEL org.opencontainers.image.source="https://github.com/allthingslinux/tux" \ - org.opencontainers.image.description="Tux - The all in one discord bot for the All Things Linux Community" \ - org.opencontainers.image.licenses="GPL-3.0" \ - org.opencontainers.image.authors="All Things Linux" \ - org.opencontainers.image.vendor="All Things Linux" \ - org.opencontainers.image.title="Tux" \ - org.opencontainers.image.documentation="https://github.com/allthingslinux/tux/blob/main/README.md" - -# Create non-root user early for security best practices -# Using system user (no login shell) with fixed UID/GID for consistency -# UID/GID 1001 is commonly used for application users in containers + org.opencontainers.image.description="Tux - The all in one discord bot for the All Things Linux Community" \ + org.opencontainers.image.licenses="GPL-3.0" \ + org.opencontainers.image.authors="All Things Linux" \ + org.opencontainers.image.vendor="All Things Linux" \ + org.opencontainers.image.title="Tux" \ + org.opencontainers.image.documentation="https://github.com/allthingslinux/tux/blob/main/README.md" + RUN groupadd --system --gid 1001 nonroot && \ - useradd --create-home --system --uid 1001 --gid nonroot nonroot + useradd --create-home --system --uid 1001 --gid nonroot nonroot -# Configure apt to avoid documentation and interactive prompts ENV DEBIAN_FRONTEND=noninteractive \ - DEBCONF_NONINTERACTIVE_SEEN=true + DEBCONF_NONINTERACTIVE_SEEN=true -# Configure dpkg to exclude documentation (reduces size and avoids man page issues) RUN echo 'path-exclude /usr/share/doc/*' > /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-include /usr/share/doc/*/copyright' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/man/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/groff/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/info/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/lintian/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/linda/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc - -# Install runtime dependencies required for the application -# 
SECURITY: Update all packages first to get latest security patches, then install specific versions -# PERFORMANCE: Packages sorted alphabetically for better caching and maintenance -# NOTE: These are the minimal dependencies required for the bot to function + echo 'path-include /usr/share/doc/*/copyright' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/man/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/groff/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/info/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/lintian/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/linda/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc + +# hadolint ignore=DL3008 RUN apt-get update && \ - apt-get upgrade -y && \ - apt-get install -y --no-install-recommends --no-install-suggests \ + apt-get upgrade -y && \ + apt-get install -y --no-install-recommends --no-install-suggests \ git \ libcairo2 \ libgdk-pixbuf-2.0-0 \ libpango-1.0-0 \ libpangocairo-1.0-0 \ shared-mime-info \ - # Cleanup package manager caches to reduce layer size - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -# Python environment optimization for containerized execution -# These settings improve performance and reduce container overhead - -# PYTHONUNBUFFERED=1 : Forces stdout/stderr to be unbuffered for real-time logs -# PYTHONDONTWRITEBYTECODE=1 : Prevents .pyc file generation (reduces I/O and size) -# PIP_DISABLE_PIP_VERSION_CHECK : Prevents pip from checking for updates (faster) -# PIP_NO_CACHE_DIR=1 : Disables pip caching (reduces container size) + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* ENV PYTHONUNBUFFERED=1 \ - PYTHONDONTWRITEBYTECODE=1 \ - PIP_DISABLE_PIP_VERSION_CHECK=on \ - PIP_NO_CACHE_DIR=1 - -# ============================================================================== -# BUILD STAGE - Development Tools and Dependency Installation -# ============================================================================== -# Purpose: Installs build tools, Poetry, and application dependencies -# Contains: Compilers, headers, build tools, complete Python environment -# Size Impact: ~1.3GB (includes all build dependencies and Python packages) -# ============================================================================== + PYTHONDONTWRITEBYTECODE=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_NO_CACHE_DIR=1 FROM base AS build -# Install build dependencies required for compiling Python packages with C extensions -# These tools are needed for packages like cryptography, pillow, etc. 
-# MAINTENANCE: Keep versions pinned and sorted alphabetically +# hadolint ignore=DL3008 RUN apt-get update && \ - apt-get upgrade -y && \ - apt-get install -y --no-install-recommends \ - # GCC compiler and build essentials for native extensions + apt-get upgrade -y && \ + apt-get install -y --no-install-recommends \ build-essential \ - # Additional utilities required by some Python packages findutils \ - # Development headers for graphics libraries libcairo2-dev \ - # Foreign Function Interface library for Python extensions libffi8 \ - # Cleanup to reduce intermediate layer size - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -# Poetry configuration for dependency management -# These settings optimize Poetry for containerized builds - -# POETRY_NO_INTERACTION=1 : Disables interactive prompts for CI/CD -# POETRY_VIRTUALENVS_CREATE=1 : Ensures virtual environment creation -# POETRY_VIRTUALENVS_IN_PROJECT=1: Creates .venv in project directory -# POETRY_CACHE_DIR=/tmp/poetry_cache: Uses temporary directory for cache -# POETRY_INSTALLER_PARALLEL=true : Enables parallel package installation - -ENV POETRY_VERSION=2.1.1 \ - POETRY_NO_INTERACTION=1 \ - POETRY_VIRTUALENVS_CREATE=1 \ - POETRY_VIRTUALENVS_IN_PROJECT=1 \ - POETRY_CACHE_DIR=/tmp/poetry_cache \ - POETRY_INSTALLER_PARALLEL=true - -# Install Poetry using pip with BuildKit cache mount for efficiency -# Cache mount prevents re-downloading Poetry on subsequent builds -RUN --mount=type=cache,target=/root/.cache \ - pip install poetry==$POETRY_VERSION - -# Set working directory for all subsequent operations -WORKDIR /app + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* -# Set shell to bash with pipefail for proper error handling in pipes -# This must be set before any RUN commands that use pipes -SHELL ["/bin/bash", "-o", "pipefail", "-c"] +ENV UV_VERSION=0.8.0 -# Copy dependency files first for optimal Docker layer caching -# Changes to these files will invalidate subsequent layers -# OPTIMIZATION: This pattern maximizes cache hits during development -COPY pyproject.toml poetry.lock ./ +RUN pip install uv==$UV_VERSION -# Install Python dependencies using Poetry -# PERFORMANCE: Cache mount speeds up subsequent builds -# SECURITY: --only main excludes development dependencies from production -# NOTE: Install dependencies only first, package itself will be installed later with git context -RUN --mount=type=cache,target=$POETRY_CACHE_DIR \ - --mount=type=cache,target=/root/.cache/pip \ - poetry install --only main --no-root --no-directory +WORKDIR /app + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] -# Copy application files in order of change frequency (Docker layer optimization) -# STRATEGY: Files that change less frequently are copied first to maximize cache reuse +COPY pyproject.toml uv.lock ./ -# 1. Configuration files (rarely change) -# These are typically static configuration that changes infrequently -COPY config/ ./config/ +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --locked --no-install-project -# 2. Database schema files (change infrequently) -# Prisma schema and migrations are relatively stable -COPY prisma/ ./prisma/ +COPY src/tux/database/migrations/ ./src/tux/database/migrations/ -# 3. Main application code (changes more frequently) -# The core bot code is most likely to change during development -COPY tux/ ./tux/ +COPY src/ ./src/ +RUN cp -a src/tux ./tux -# 4. 
Root level files needed for installation -# These include metadata and licensing information -COPY README.md LICENSE pyproject.toml ./ +COPY README.md LICENSE pyproject.toml alembic.ini ./ +COPY scripts/ ./scripts/ -# Build arguments for version information -# These allow passing version info without requiring git history in build context ARG VERSION="" ARG GIT_SHA="" ARG BUILD_DATE="" -# Generate version file using build args with fallback -# PERFORMANCE: Version is determined at build time, not runtime -# SECURITY: Git operations happen outside container, only VERSION string is passed in RUN set -eux; \ - if [ -n "$VERSION" ]; then \ - # Use provided version from build args (preferred for all builds) + if [ -n "$VERSION" ]; then \ echo "Using provided version: $VERSION"; \ echo "$VERSION" > /app/VERSION; \ - else \ - # Fallback for builds without version info - # NOTE: .git directory is excluded by .dockerignore for security/performance - # Version should be passed via --build-arg VERSION=$(git describe --tags --always --dirty | sed 's/^v//') + else \ echo "No version provided, using fallback"; \ echo "dev" > /app/VERSION; \ - fi; \ - echo "Building version: $(cat /app/VERSION)" - -# Install the application and generate Prisma client -# COMPLEXITY: This step requires multiple operations that must be done together -RUN --mount=type=cache,target=$POETRY_CACHE_DIR \ - --mount=type=cache,target=/root/.cache \ - # Install the application package itself - poetry install --only main - -# ============================================================================== -# DEVELOPMENT STAGE - Development Environment -# ============================================================================== -# Purpose: Provides a full development environment with tools and debugging capabilities -# Contains: All build tools, development dependencies, debugging utilities -# Target: Used by docker-compose.dev.yml for local development -# Size Impact: ~1.6GB (includes development dependencies and tools) -# ============================================================================== + fi; \ + echo "Building version: $(cat /app/VERSION)" + +# Sync the project +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --locked FROM build AS dev WORKDIR /app -# Build argument to conditionally install additional development tools -# Allows customization for different development environments (IDE, devcontainer, etc.) 
ARG DEVCONTAINER=0 ENV DEVCONTAINER=${DEVCONTAINER} +# hadolint ignore=DL3008 RUN set -eux; \ - # Conditionally install zsh for enhanced development experience - # Only installs if DEVCONTAINER build arg is set to 1 - if [ "$DEVCONTAINER" = "1" ]; then \ + if [ "$DEVCONTAINER" = "1" ]; then \ apt-get update && \ apt-get install -y --no-install-recommends zsh && \ chsh -s /usr/bin/zsh && \ apt-get clean && \ rm -rf /var/lib/apt/lists/*; \ - fi; \ -# Fix ownership of all application files for non-root user -# SECURITY: Ensures the application runs with proper permissions -COPY --from=build --chown=nonroot:nonroot /app /app + fi; \ + COPY --from=build --chown=nonroot:nonroot /app /app RUN set -eux; \ - # Create application cache and temporary directories - # These directories are used by the bot for caching and temporary files - mkdir -p /app/.cache/tldr /app/temp; \ - # Create user cache directories (fixes permission issues for Prisma/npm) - mkdir -p /home/nonroot/.cache /home/nonroot/.npm; \ -# Switch to non-root user for all subsequent operations -# SECURITY: Follows principle of least privilege + mkdir -p /app/.cache/tldr /app/temp; \ + mkdir -p /home/nonroot/.cache /home/nonroot/.npm; \ + chown -R nonroot:nonroot /app/.cache /app/temp /home/nonroot/.cache /home/nonroot/.npm; \ + chmod -R 755 /app/.cache /app/temp /home/nonroot/.cache /home/nonroot/.npm + +RUN uv sync --dev + +ENV VIRTUAL_ENV=/app/.venv \ + PATH="/app/.venv/bin:$PATH" \ + PYTHONPATH="/app" \ + PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 + USER nonroot -# Install development dependencies and setup Prisma -# DEVELOPMENT: These tools are needed for linting, testing, and development workflow -RUN poetry install --only dev --no-root --no-directory && \ - poetry run prisma py fetch && \ - poetry run prisma generate - -# Development container startup command -# WORKFLOW: Regenerates Prisma client and starts the bot in development mode -# This ensures the database client is always up-to-date with schema changes -CMD ["sh", "-c", "poetry run prisma generate && exec poetry run tux --dev start"] - -# ============================================================================== -# PRODUCTION STAGE - Minimal Runtime Environment -# ============================================================================== -# Purpose: Creates a minimal, secure, and optimized image for production deployment -# Contains: Only runtime dependencies, application code, and essential files -# Security: Non-root execution, minimal attack surface, health monitoring -# Size Impact: ~440MB (73% reduction from development image) -# ============================================================================== +COPY docker/entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh +CMD ["/entrypoint.sh"] FROM python:3.13.7-slim@sha256:27f90d79cc85e9b7b2560063ef44fa0e9eaae7a7c3f5a9f74563065c5477cc24 AS production -# Duplicate OCI labels for production image metadata -# COMPLIANCE: Ensures production images have proper metadata for registries LABEL org.opencontainers.image.source="https://github.com/allthingslinux/tux" \ - org.opencontainers.image.description="Tux - The all in one discord bot for the All Things Linux Community" \ - org.opencontainers.image.licenses="GPL-3.0" \ - org.opencontainers.image.authors="All Things Linux" \ - org.opencontainers.image.vendor="All Things Linux" \ - org.opencontainers.image.title="Tux" \ - org.opencontainers.image.documentation="https://github.com/allthingslinux/tux/blob/main/README.md" - -# Create non-root user (same as 
base stage) -# SECURITY: Consistent user across all stages for permission compatibility + org.opencontainers.image.description="Tux - The all in one discord bot for the All Things Linux Community" \ + org.opencontainers.image.licenses="GPL-3.0" \ + org.opencontainers.image.authors="All Things Linux" \ + org.opencontainers.image.vendor="All Things Linux" \ + org.opencontainers.image.title="Tux" \ + org.opencontainers.image.documentation="https://github.com/allthingslinux/tux/blob/main/README.md" + RUN groupadd --system --gid 1001 nonroot && \ - useradd --create-home --system --uid 1001 --gid nonroot nonroot + useradd --create-home --system --uid 1001 --gid nonroot nonroot -# Configure apt for production (same as base stage) ENV DEBIAN_FRONTEND=noninteractive \ - DEBCONF_NONINTERACTIVE_SEEN=true + DEBCONF_NONINTERACTIVE_SEEN=true -# Configure dpkg to exclude documentation (reduces size and avoids man page issues) RUN echo 'path-exclude /usr/share/doc/*' > /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-include /usr/share/doc/*/copyright' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/man/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/groff/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/info/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/lintian/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ - echo 'path-exclude /usr/share/linda/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc - -# Install ONLY runtime dependencies (minimal subset of base stage) -# SECURITY: Update all packages first, then install minimal runtime dependencies -# SIZE: Significantly smaller than build stage dependencies + echo 'path-include /usr/share/doc/*/copyright' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/man/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/groff/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/info/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc && \ + echo 'path-exclude /usr/share/lintian/*' >> /etc/dpkg/dpkg.cfg.d/01_nodoc + +# hadolint ignore=DL3008 RUN apt-get update && \ - apt-get upgrade -y && \ - apt-get install -y --no-install-recommends --no-install-suggests \ + apt-get upgrade -y && \ + apt-get install -y --no-install-recommends --no-install-suggests \ libcairo2 \ libffi8 \ coreutils \ - # Aggressive cleanup to minimize image size - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && rm -rf /var/cache/apt/* \ - && rm -rf /tmp/* \ - && rm -rf /var/tmp/* + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* \ + && rm -rf /var/cache/apt/* \ + && rm -rf /tmp/* \ + && rm -rf /var/tmp/* WORKDIR /app -# Production environment configuration -# OPTIMIZATION: Settings tuned for production performance and security - -# VIRTUAL_ENV=/app/.venv : Points to the virtual environment -# PATH="/app/.venv/bin:$PATH" : Ensures venv binaries are found first -# PYTHONPATH="/app" : Allows imports from the app directory -# PYTHONOPTIMIZE=2 : Maximum Python bytecode optimization -# Other vars inherited from base stage for consistency - ENV VIRTUAL_ENV=/app/.venv \ - PATH="/app/.venv/bin:$PATH" \ - PYTHONPATH="/app" \ - PYTHONOPTIMIZE=2 \ - PYTHONUNBUFFERED=1 \ - PYTHONDONTWRITEBYTECODE=1 \ - PIP_DISABLE_PIP_VERSION_CHECK=on \ - PIP_NO_CACHE_DIR=1 - -# Copy essential files from build stage with proper ownership -# SECURITY: --chown ensures files are owned by non-root user -# EFFICIENCY: Only copies what's needed for runtime + PATH="/app/.venv/bin:$PATH" \ + 
PYTHONPATH="/app:/app/src" \ + PYTHONOPTIMIZE=2 \ + PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_NO_CACHE_DIR=1 + COPY --from=build --chown=nonroot:nonroot /app/.venv /app/.venv COPY --from=build --chown=nonroot:nonroot /app/tux /app/tux -COPY --from=build --chown=nonroot:nonroot /app/prisma /app/prisma -COPY --from=build --chown=nonroot:nonroot /app/config /app/config +COPY --from=build --chown=nonroot:nonroot /app/src /app/src COPY --from=build --chown=nonroot:nonroot /app/pyproject.toml /app/pyproject.toml COPY --from=build --chown=nonroot:nonroot /app/VERSION /app/VERSION +COPY --from=build --chown=nonroot:nonroot /app/alembic.ini /app/alembic.ini +COPY --from=build --chown=nonroot:nonroot /app/scripts /app/scripts -# Create convenient symlinks for Python and application binaries -# USABILITY: Allows running 'python' and 'tux' commands without full paths -# COMPATIBILITY: Maintains expected command locations for scripts and debugging RUN ln -sf /app/.venv/bin/python /usr/local/bin/python && \ - ln -sf /app/.venv/bin/tux /usr/local/bin/tux + ln -sf /app/.venv/bin/tux /usr/local/bin/tux RUN set -eux; \ - mkdir -p /app/.cache/tldr /app/temp; \ - mkdir -p /home/nonroot/.cache /home/nonroot/.npm; \ - rm -rf /home/nonroot/.npm/_cacache_; \ - chown nonroot:nonroot /app/.cache /app/temp /home/nonroot/.cache /home/nonroot/.npm + mkdir -p /app/.cache/tldr /app/temp; \ + mkdir -p /home/nonroot/.cache /home/nonroot/.npm; \ + rm -rf /home/nonroot/.npm/_cacache_; \ + chown -R nonroot:nonroot /app/.cache /app/temp /home/nonroot/.cache /home/nonroot/.npm; \ + chmod -R 755 /app/.cache /app/temp /home/nonroot/.cache /home/nonroot/.npm -# Switch to non-root user and finalize Prisma binaries USER nonroot -RUN /app/.venv/bin/python -m prisma py fetch \ - && /app/.venv/bin/python -m prisma generate USER root -# Aggressive cleanup and optimization after Prisma setup -# PERFORMANCE: Single RUN reduces layer count and enables atomic cleanup -# SIZE: Removes unnecessary files to minimize final image size but preserves Prisma binaries + RUN set -eux; \ - # VIRTUAL ENVIRONMENT CLEANUP - # The following operations remove unnecessary files from the Python environment - # This can reduce the size by 30-50MB without affecting functionality - # Remove Python bytecode files (will be regenerated as needed) - find /app/.venv -name "*.pyc" -delete; \ - find /app/.venv -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true; \ - # Remove test directories from installed packages (but preserve prisma binaries) - # These directories contain test files that are not needed in production - for test_dir in tests testing "test*"; do \ - find /app/.venv -name "$test_dir" -type d -not -path "*/prisma*" -exec rm -rf {} + 2>/dev/null || true; \ - done; \ - # Remove documentation files from installed packages (but preserve prisma docs) - # These files take up significant space and are not needed in production - for doc_pattern in "*.md" "*.txt" "*.rst" "LICENSE*" "NOTICE*" "COPYING*" "CHANGELOG*" "README*" "HISTORY*" "AUTHORS*" "CONTRIBUTORS*"; do \ - find /app/.venv -name "$doc_pattern" -not -path "*/prisma*" -delete 2>/dev/null || true; \ - done; \ - # Remove large development packages that are not needed in production - # These packages (pip, setuptools, wheel) are only needed for installing packages - # NOTE: Preserving packages that Prisma might need - for pkg in setuptools wheel pkg_resources; do \ - rm -rf /app/.venv/lib/python3.13/site-packages/${pkg}* 2>/dev/null || 
true; \ - rm -rf /app/.venv/bin/${pkg}* 2>/dev/null || true; \ - done; \ - rm -rf /app/.venv/bin/easy_install* 2>/dev/null || true; \ - # Compile Python bytecode for performance optimization - # PERFORMANCE: Pre-compiled bytecode improves startup time - # Note: Some compilation errors are expected and ignored - /app/.venv/bin/python -m compileall -b -q /app/tux /app/.venv/lib/python3.13/site-packages 2>/dev/null || true - -# Switch back to non-root user for runtime + find /app/.venv -name "*.pyc" -delete; \ + find /app/.venv -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true; \ + for test_dir in tests testing "test*"; do \ + find /app/.venv -name "$test_dir" -type d -not -path "*/prisma*" -exec rm -rf {} + 2>/dev/null || true; \ + done; \ + for doc_pattern in "*.md" "*.txt" "*.rst" "LICENSE*" "NOTICE*" "COPYING*" "CHANGELOG*" "README*" "HISTORY*" "AUTHORS*" "CONTRIBUTORS*"; do \ + find /app/.venv -name "$doc_pattern" -not -path "*/prisma*" -delete 2>/dev/null || true; \ + done; \ + for pkg in setuptools wheel pkg_resources; do \ + rm -rf /app/.venv/lib/python3.13/site-packages/${pkg}* 2>/dev/null || true; \ + rm -rf /app/.venv/bin/${pkg}* 2>/dev/null || true; \ + done; \ + rm -rf /app/.venv/bin/easy_install* 2>/dev/null || true; \ + /app/.venv/bin/python -m compileall -b -q /app/tux /app/.venv/lib/python3.13/site-packages 2>/dev/null || true + USER nonroot -# Health check configuration for container orchestration -# MONITORING: Allows Docker/Kubernetes to monitor application health -# RELIABILITY: Enables automatic restart of unhealthy containers HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ - CMD python -c "import tux.cli.core; import tux.utils.env; print('Health check passed')" || exit 1 - -# --interval=30s : Check health every 30 seconds -# --timeout=10s : Allow 10 seconds for health check to complete -# --start-period=40s: Wait 40 seconds before first health check (startup time) -# --retries=3 : Mark unhealthy after 3 consecutive failures - -# Application entry point and default command -# DEPLOYMENT: Configures how the container starts in production -ENTRYPOINT ["tux"] -CMD ["--prod", "start"] - -# ENTRYPOINT ["tux"] : Always runs the tux command -# CMD ["--prod", "start"]: Default arguments for production mode -# FLEXIBILITY: CMD can be overridden, ENTRYPOINT cannot (security) - -# ============================================================================== -# DOCKERFILE BEST PRACTICES IMPLEMENTED -# ============================================================================== -# -# 1. MULTI-STAGE BUILDS: Separates build and runtime environments -# 2. LAYER OPTIMIZATION: Ordered operations to maximize cache hits -# 3. SECURITY: Non-root user, pinned versions, minimal attack surface -# 4. SIZE OPTIMIZATION: Aggressive cleanup, minimal dependencies -# 5. MAINTAINABILITY: Comprehensive documentation, organized structure -# 6. RELIABILITY: Health checks, proper error handling -# 7. PERFORMANCE: Optimized Python settings, pre-compiled bytecode -# 8. COMPLIANCE: OCI labels, standard conventions -# -# USAGE EXAMPLES: -# --------------- -# Build production image: -# docker build --target production -t tux:latest . -# -# Build development image: -# docker build --target dev -t tux:dev . -# -# Build with devcontainer tools: -# docker build --target dev --build-arg DEVCONTAINER=1 -t tux:devcontainer . 
-# -# Run production container: -# docker run -d --name tux-bot --env-file .env tux:latest -# -# Run development container: -# docker-compose -f docker-compose.dev.yml up -# -# ============================================================================== + CMD python -c "import tux.shared.config.env; print('Health check passed')" || exit 1 + +COPY --chmod=755 docker/entrypoint.sh /entrypoint.sh +ENTRYPOINT ["/entrypoint.sh"] +CMD [] diff --git a/README.md b/README.md index c31ac8bd7..6764d5e16 100644 --- a/README.md +++ b/README.md @@ -25,8 +25,8 @@ Repo size Python - - Poetry + + Uv License @@ -39,10 +39,14 @@ ## Table of Contents +- [Table of Contents](#table-of-contents) - [About](#about) - [Tech Stack](#tech-stack) - [Bot Features](#bot-features) - [Installation and Development](#installation-and-development) + - [Prerequisites](#prerequisites) + - [Setup \& Workflow](#setup--workflow) + - [Quick Commands](#quick-commands) - [License](#license) - [Metrics](#metrics) - [Contributors](#contributors) @@ -56,12 +60,12 @@ It is designed to provide a variety of features to the server, including moderat ## Tech Stack - Python 3.13+ alongside the `discord.py` library -- Poetry for dependency management +- Uv for dependency management - Docker and Docker Compose for optional containerized environments - Strict typing with `basedpyright` and type hints -- Type safe ORM using `prisma` +- Type safe ORM using `SQLModel` with `SQLAlchemy` - Linting and formatting via `ruff` -- Custom CLI via `click` and `poetry` scripts +- Custom CLI via `typer` and `uv` scripts - Rich logging with `loguru` - Exception handling with `sentry-sdk` - Request handling with `httpx` @@ -77,16 +81,16 @@ It is designed to provide a variety of features to the server, including moderat - Robust error handling - Activity rotation - Custom help command -- Configuration system (`config/settings.yml.example`) +- Configuration system (environment variables + `.env` file) - Dynamic role-based (access level) permission system -- Basic extensions system (see [extensions](tux/extensions/README.md)) +- Plugin system (see [plugins](src/tux/plugins/README.md)) ## Installation and Development ### Prerequisites - Python 3.13+ -- [Poetry](https://python-poetry.org/docs/) +- [Uv](https://docs.astral.sh/uv/) - A PostgreSQL database (e.g. via [Supabase](https://supabase.io/) or local installation) - Optional: [Docker](https://docs.docker.com/get-docker/) & [Docker Compose](https://docs.docker.com/compose/install/) @@ -94,23 +98,72 @@ It is designed to provide a variety of features to the server, including moderat 1. **Clone the repository:** - ```bash - git clone https://github.com/allthingslinux/tux && cd tux - ``` + ```bash + git clone https://github.com/allthingslinux/tux.git + cd tux + ``` -2. **Follow the Developer Guide:** +2. **Install dependencies:** - For detailed instructions on setting up: - - your environment (local or Docker) - - installing dependencies - - configuring `.env` and `settings.yml` - - managing the database - - running the bot - - using hot-reloading - - linting/formatting - - understanding the `tux` CLI commands + ```bash + uv sync + ``` - ### Please refer to the **[DEVELOPER.md](DEVELOPER.md)** guide for more information +3. **Configure your environment:** + + ```bash + cp env.example .env + # Edit .env with your bot tokens and database URLs + ``` + +4. 
**Start the bot:** + + ```bash + # Start the bot (auto-detects environment, defaults to development) + uv run tux start + + # Start with debug mode + uv run tux start --debug + ``` + +### Quick Commands + +```bash +# Development +uv run tux start # Start bot in development mode +uv run tux start --debug # Start bot with debug mode +uv run dev lint # Check code quality with Ruff +uv run dev format # Format code with Ruff +uv run dev type-check # Check types with basedpyright +uv run dev pre-commit # Run pre-commit checks +uv run dev all # Run all development checks + +# Testing +uv run test run # Run tests with coverage +uv run test quick # Run tests without coverage (faster) +uv run test html # Run tests and generate HTML report +uv run test coverage # Generate coverage reports + +# Database +uv run db migrate-dev # Create and apply migrations for development +uv run db migrate-push # Push pending migrations to database +uv run db migrate-generate "message" # Generate a new migration +uv run db health # Check database health + +# Docker +uv run docker up # Start Docker services +uv run docker down # Stop Docker services +uv run docker build # Build Docker images +uv run docker logs # Show Docker service logs +uv run docker ps # List running containers +uv run docker shell # Open shell in container +``` + +**For detailed setup instructions, see [SETUP.md](SETUP.md)** + +**For developer information, see [DEVELOPER.md](DEVELOPER.md)** + +**For configuration documentation, see [CONFIG.md](CONFIG.md)** ## License diff --git a/VERSIONING.md b/VERSIONING.md index ed9a00587..79ce25337 100644 --- a/VERSIONING.md +++ b/VERSIONING.md @@ -10,11 +10,28 @@ We follow the [Semantic Versioning (SemVer)](https://semver.org/) specification - **MINOR**: Incremented for new, backward-compatible functionality. - **PATCH**: Incremented for backward-compatible bug fixes. -Release candidates can be denoted with suffixes (e.g., `1.0.0-rc1`). +Release candidates can be denoted with suffixes (e.g., `1.0.0-rc.1`). + +## Unified Version System + +The Tux project uses a **unified version system** (`src/tux/shared/version.py`) that provides a single source of truth for all version-related functionality. This system is designed to be: + +- **DRY (Don't Repeat Yourself)**: All version logic is centralized in one module +- **Seamless**: Works consistently across all environments (development, Docker, CI/CD) +- **Professional**: Robust error handling, caching, and type safety +- **Testable**: Clean, focused tests without complex mocking + +### Key Features + +- **Version Detection**: Automatic detection from multiple sources with clear priority +- **Semantic Versioning**: Full semver validation and comparison support +- **Caching**: Version is detected once and cached for performance +- **Build Information**: Comprehensive build metadata including git SHA and Python version +- **Error Handling**: Graceful fallbacks ensure the application always starts ## Version Detection -The application version is determined dynamically at runtime. The `tux/__init__.py` module contains a robust detection mechanism that checks multiple sources in a specific order of priority. This ensures that the version is always available, regardless of the environment. +The application version is determined dynamically at runtime using the unified version system. The `tux/__init__.py` module imports from `tux.shared.version` and exposes the detected version as `__version__`. 
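
Because `__version__` is resolved at import time, the detection priority can be observed directly from a shell. A quick sanity check (assuming a working checkout; the printed values are illustrative):

```bash
# Highest priority: the TUX_VERSION environment variable wins over everything else
TUX_VERSION=9.9.9-test uv run python -c "import tux; print(tux.__version__)"
# -> 9.9.9-test

# Without the override, detection falls through to the VERSION file,
# then `git describe`, then the "dev" fallback
uv run python -c "import tux; print(tux.__version__)"
```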
The `version` field in `pyproject.toml` is intentionally set to a static placeholder (`0.0.0`) because the true version is resolved dynamically. @@ -23,32 +40,59 @@ The `version` field in `pyproject.toml` is intentionally set to a static placeho The version is sourced by trying the following methods in order, stopping at the first success: 1. **`TUX_VERSION` Environment Variable**: - - **Usage**: A runtime override. + - **Usage**: A runtime override for testing, deployment, or CI/CD scenarios. - **Example**: `TUX_VERSION=1.2.3-custom tux --dev start` - **Priority**: Highest. If set, this value is always used. + - **Use Cases**: + - Testing with specific versions + - Production deployments with custom versioning + - CI/CD pipelines that need to override detected versions 2. **`VERSION` File**: - - **Usage**: The primary versioning method for Docker images. This file is generated during the Docker build process. - - **Location**: Project root (`/app/VERSION` inside the container). + - **Usage**: The primary versioning method for Docker images and production deployments. + - **Location**: Project root (`/app/VERSION` inside containers). + - **Creation**: Generated during Docker build process or manually created for releases. + - **Use Cases**: + - Docker containers where git history may not be available + - Release builds where exact version control is required + - Environments where git operations are restricted 3. **Git Tags (`git describe`)**: - **Usage**: The standard for development environments where the Git history is available. - - **Format**: It produces version strings like: + - **Format**: Produces version strings like: - `1.2.3`: For a commit that is tagged directly. - `1.2.3-10-gabc1234`: For a commit that is 10 commits ahead of the `v1.2.3` tag. - - `1.2.3-10-gabc1234-dirty`: If there are uncommitted changes. + - `1.2.3-10-gabc1234-dirty`: If there are uncommitted changes (cleaned for semver compatibility). - **Note**: The leading `v` from tags (e.g., `v1.2.3`) is automatically removed. + - **Use Cases**: + - Development environments with full git history + - Local testing and development + - CI/CD environments with git access -4. **Package Metadata (`importlib.metadata`)**: - - **Usage**: For when Tux is installed as a package from PyPI or a wheel file. - - **Mechanism**: Reads the version from the installed package's metadata. - -5. **Fallback to `"dev"`**: +4. **Fallback to `"dev"`**: - **Usage**: A final fallback if all other methods fail, ensuring the application can always start. + - **Use Cases**: + - Environments without git access + - Missing VERSION files + - Fallback when all detection methods fail + +### Version System API + +The unified version system provides several utility functions: + +```python +from tux.shared.version import ( + get_version, # Get current version + is_semantic_version, # Check if version is valid semver + compare_versions, # Compare two semantic versions + get_version_info, # Get detailed version components + get_build_info, # Get build metadata +) +``` ## Release Cycle and Git Tagging -The release process is centered around Git tags. +The release process is centered around Git tags and follows semantic versioning principles. 1. **Create a Release**: To create a new version, create and push an annotated Git tag: @@ -60,32 +104,122 @@ The release process is centered around Git tags. 2. 
**Development Version**: Between releases, any new commits will result in a development version string (e.g., `1.2.3-5-g567def8`), indicating progress since the last tag. +3. **Pre-release Versions**: Use proper semver pre-release identifiers: + + ```sh + # Release candidates + git tag -a v1.2.3-rc.1 -m "Release candidate v1.2.3-rc.1" + + # Beta versions + git tag -a v1.2.3-beta.1 -m "Beta v1.2.3-beta.1" + + # Alpha versions + git tag -a v1.2.3-alpha.1 -m "Alpha v1.2.3-alpha.1" + ``` + ## Docker Image Tagging -Our Docker build process is designed to bake the version directly into the image, ensuring traceability. +Our Docker build process is designed to bake the version directly into the image, ensuring traceability and consistency with the unified version system. -- **Build Process**: The `Dockerfile` uses a build argument (`VERSION`) to create a `VERSION` file inside the image. This file becomes the source of truth for the version within the container. +### Build Process -- **Building an Image**: To build a versioned image, pass the `VERSION` argument, preferably derived from `git describe`: +The `Dockerfile` uses build arguments to create a `VERSION` file inside the image: - ```sh - # Recommended command to build a production image - docker build \ - --build-arg VERSION=$(git describe --tags --always --dirty | sed 's/^v//') \ - --target production \ - -t your-registry/tux:latest . - ``` +```dockerfile +ARG VERSION="" +ARG GIT_SHA="" +ARG BUILD_DATE="" - You can also tag the image with the specific version: +# Generate version file using build args with fallback +RUN set -eux; \ + if [ -n "$VERSION" ]; then \ + echo "$VERSION" > /app/VERSION; \ + else \ + echo "dev" > /app/VERSION; \ + fi +``` - ```sh - # Tag with the specific version for better tracking - VERSION_TAG=$(git describe --tags --always --dirty | sed 's/^v//') - docker build \ - --build-arg VERSION=$VERSION_TAG \ - --target production \ - -t your-registry/tux:$VERSION_TAG \ - -t your-registry/tux:latest . - ``` +### Building Versioned Images + +To build a versioned image, pass the `VERSION` argument: + +```sh +# Recommended command to build a production image +docker build \ + --build-arg VERSION=$(git describe --tags --always --dirty | sed 's/^v//') \ + --target production \ + -t your-registry/tux:latest . +``` + +You can also tag the image with the specific version: + +```sh +# Tag with the specific version for better tracking +VERSION_TAG=$(git describe --tags --always --dirty | sed 's/^v//') +docker build \ + --build-arg VERSION=$VERSION_TAG \ + --target production \ + -t your-registry/tux:$VERSION_TAG \ + -t your-registry/tux:latest . +``` + +### GitHub Actions Integration + +Our GitHub Actions workflows automatically handle version generation: + +- **PR Builds**: Generate versions like `pr-123-abc1234` +- **Release Builds**: Use the git tag version (e.g., `1.2.3`) +- **Docker Builds**: Pass the generated version as build arguments This ensures that even in a detached production environment without Git, the application reports the correct version it was built from. 
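
One way to confirm the baked-in version matches expectations is to build an image and read the file back out of it. A sketch (the `tux:verify` tag is illustrative):

```bash
# Build with an explicit version derived from git
VERSION_TAG=$(git describe --tags --always --dirty | sed 's/^v//')
docker build --build-arg VERSION="$VERSION_TAG" --target production -t tux:verify .

# The Dockerfile writes the build arg to /app/VERSION, so both should agree
docker run --rm --entrypoint cat tux:verify /app/VERSION
echo "expected: $VERSION_TAG"
```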
+ +## Testing the Version System + +The version system includes comprehensive tests (`tests/unit/test_version_system.py`) that cover: + +- Version detection from all sources +- Priority order validation +- Edge cases and error handling +- Semantic version validation +- Build information generation +- Integration with other components + +Run the tests with: + +```sh +uv run pytest tests/unit/test_version_system.py -v +``` + +## Troubleshooting + +### Common Issues + +1. **Version shows as "dev"**: + - Check if you're in a git repository + - Verify the VERSION file exists and contains a valid version + - Ensure TUX_VERSION environment variable is not set to an empty value + +2. **Git describe fails**: + - Ensure you have at least one git tag + - Check git repository integrity + - Verify git is available in the environment + +3. **Docker version mismatch**: + - Ensure VERSION build arg is passed correctly + - Check that the VERSION file is created in the container + - Verify the Dockerfile version generation logic + +### Debugging + +You can debug version detection by checking the version system directly: + +```python +from tux.shared.version import VersionManager + +manager = VersionManager() +print(f"Detected version: {manager.get_version()}") +print(f"Build info: {manager.get_build_info()}") +print(f"Is semantic version: {manager.is_semantic_version()}") +``` + +This unified version system ensures consistent, reliable versioning across all environments while maintaining the flexibility needed for different deployment scenarios. diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 000000000..f9e1eb3fa --- /dev/null +++ b/alembic.ini @@ -0,0 +1,86 @@ +[alembic] +# path to migration scripts +script_location = src/tux/database/migrations + +# template used to generate migration files +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path +prepend_sys_path = src + +# timezone to use when rendering the date within the migration file +# as well as the filename. +timezone = UTC + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment file as part of +# the 'revision' environment script, instead of invoking +# the migration class directly +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version path separator; defaults to os.sep +# version_path_separator = os # Use 'os' if using os.sep + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# This setting is used by pytest-alembic to locate migration scripts +version_locations = src/tux/database/migrations/versions + +# Database URL - will be overridden by env.py based on environment +sqlalchemy.url = postgresql://placeholder + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - uses the console_scripts entry point defined in setup.cfg +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 120 + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000..85eb68324 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,123 @@ +--- +# ============================================================================== +# TUX DISCORD BOT - MODERN CODECOV CONFIGURATION +# ============================================================================== +# +# This configuration follows current Codecov best practices and aligns with +# your current test organization (unit vs integration tests). +# +# DOCUMENTATION: https://docs.codecov.com/docs/codecov-yaml +# ============================================================================== +# ============================================================================== +# CODECOV BEHAVIOR SETTINGS +# ============================================================================== +codecov: + require_ci_to_pass: true + # yamllint disable-line rule:truthy + max_report_age: false + disable_default_path_fixes: false +# ============================================================================== +# COVERAGE REQUIREMENTS +# ============================================================================== +coverage: + precision: 2 + round: down + range: 70...100 + status: + project: + # Overall project coverage - progressive improvement + default: + target: auto + threshold: 1% + informational: true # Don't block PRs while building up test suite + + # Critical components with higher standards + database: + target: 85% + threshold: 2% + informational: true + paths: [src/tux/database/] + core: + target: 80% + threshold: 2% + informational: true + paths: [src/tux/core/, src/tux/main.py, src/tux/help.py] + patch: + # New code coverage requirements + default: + target: 80% + threshold: 5% + informational: true + only_pulls: true +# ============================================================================== +# TEST FLAG DEFINITIONS +# ============================================================================== +# These align with your pytest markers and test organization +flags: + unit: + paths: [src/tux/] + carryforward: true + integration: + paths: [src/tux/] + carryforward: true + e2e: + paths: [src/tux/] + carryforward: true +# ============================================================================== +# FILE IGNORING +# ============================================================================== +ignore: + # Test files and development artifacts + - tests/ + - conftest.py + - .pytest_cache/ + - .ruff_cache/ + - htmlcov/ + + # Build and environment files + - .venv/ + - typings/ + - __pycache__/ + + # Project management files + - docs/ + - scripts/ + - assets/ + - logs/ + - '*.md' + - '*.toml' + - '*.lock' + - '*.nix' + - flake.* + - shell.nix + + # Generated files + - prisma/ 
+# ============================================================================== +# PARSER CONFIGURATION +# ============================================================================== +parsers: + v1: + include_full_missed_files: true +# ============================================================================== +# COMMENT CONFIGURATION +# ============================================================================== +comment: + layout: condensed_header, diff, flags, components, condensed_files, condensed_footer + behavior: default + require_changes: true + require_base: false + require_head: true + after_n_builds: 1 + show_carryforward_flags: true +# ============================================================================== +# GITHUB INTEGRATION +# ============================================================================== +# Enhanced integration with GitHub's pull request interface +github_checks: + annotations: true # Show line-by-line coverage in PR file diffs +# ============================================================================== +# PATH NORMALIZATION +# ============================================================================== +# Fix coverage.py path mapping for src/tux structure +fixes: [.*/src/tux/::src/tux/, src/tux/::src/tux/] diff --git a/config/settings.yml.example b/config/settings.yml.example deleted file mode 100644 index 5eed2e360..000000000 --- a/config/settings.yml.example +++ /dev/null @@ -1,124 +0,0 @@ -# This is a example configuration file for Tux -# Change the values to your liking and rename the file to settings.yml - -BOT_INFO: - PROD_PREFIX: "$" - DEV_PREFIX: "~" # You can enable dev mode in .env - BOT_NAME: "Tux" # This may not apply everywhere, WIP (Best to keep it as Tux for now). Help command will be changed to be less Tux-specific if you change this. - HIDE_BOT_OWNER: false # Hide bot owner and sysadmin from help command - # Available substitutions: - # {member_count} - total member count of all guilds - # {guild_count} - total guild count - # {bot_name} - bot name - # {bot_version} - bot version - # {prefix} - bot prefix - ACTIVITIES: | - [ - {"type": "watching", "name": "{member_count} members"}, - {"type": "listening", "name": "{guild_count} guilds"}, - {"type": "playing", "name": "{bot_name} {bot_version}"}, - {"type": "watching", "name": "All Things Linux"}, - {"type": "playing", "name": "with fire"}, - {"type": "watching", "name": "linux tech tips"}, - {"type": "listening", "name": "mpd"}, - {"type": "watching", "name": "a vast field of grain"}, - {"type": "playing", "name": "i am calling about your car's extended warranty"}, - {"type": "playing", "name": "SuperTuxKart"}, - {"type": "playing", "name": "SuperTux 2"}, - {"type": "watching", "name": "Gentoo compile..."}, - {"type": "watching", "name": "Brodie Robertson"}, - {"type": "listening", "name": "Terry Davis on YouTube"}, - {"type": "playing", "name": "with Puffy"}, - {"type": "watching", "name": "the stars"}, - {"type": "watching", "name": "VLC"}, - {"type": "streaming", "name": "SuperTuxKart", "url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ"} - ] - -# This allows sysadmins to use the eval and jsk commands which can execute arbitrary code. 
-# Do enable if: -# - Tux is dockerized -# - You trust your sysadmins with anything that the docker container can do (e.g if they already can access the host system) -# - You are a small server -# DO NOT ENABLE IF: -# - Tux is not dockerized and you do not trust your sysadmins with the host system -# - You are a large server and Tux has full permissions -# - You do not trust your sysadmins with anything that the docker container can do -# - IF YOU ARE A MULTIPLE SERVER INSTANCE, DO NOT ENABLE IT FOR THE LOVE OF GOD -# If you are not sure, do not enable this. -ALLOW_SYSADMINS_EVAL: false - -USER_IDS: - # These have access to all permissions in all servers, except for $eval and $jsk commands (unless set to true). - # Only give these to people you trust with the bot and who are able to handle the responsibilities that come with it. - SYSADMINS: - - 123456789012345679 - - 123456789012345679 - - # This should be the person who owns the bot and nobody else unless you ABSOLUTELY know what you are doing. - # This person has access to all permissions in all servers, including $eval and $jsk commands. - BOT_OWNER: 123456789012345679 - -# This adds a temporary voice channel feature to the bot, you can join the channel to create a channel called /tmp/ and move to it. -# Channels are deleted when the last person leaves them. -# Set this to the category ID where you want the temporary voice channels to be created. -# Temporary channels will be put at the bottom of the category. -TEMPVC_CATEGORY_ID: 123456789012345679 -# Set this to the channel ID where you want the temporary voice channels to be created. -TEMPVC_CHANNEL_ID: 123456789012345679 - -# This will automatically give people with a status regex a role. -STATUS_ROLES: - #- server_id: 123456789012345679 - # status_regex: ".*" - # role_id: 123456789012345679 - -SNIPPETS: - LIMIT_TO_ROLE_IDS: false # Only allow users with the specified role IDs to use the snippet command - ACCESS_ROLE_IDS: - - 123456789012345679 - - 123456789012345679 - -XP: - XP_BLACKLIST_CHANNELS: # Channels where XP will not be counted - - 123456789012345679 - - 123456789012345679 - XP_ROLES: # Roles that will be given to users when they reach a certain level - - level: 5 - role_id: 123456789012345679 - - level: 10 - role_id: 123456789012345679 - - level: 15 - role_id: 123456789012345679 - - level: 20 - role_id: 123456789012345679 - - level: 25 - role_id: 123456789012345679 - - XP_MULTIPLIERS: # Multipliers for certain roles - - role_id: 123456789012345679 - multiplier: 1.5 - - XP_COOLDOWN: 1 # Delay in seconds between XP messages - - LEVELS_EXPONENT: 1 # Exponent for the level formula - SHOW_XP_PROGRESS: false # Shows required XP for the next level in the level command - ENABLE_XP_CAP: false # if true, XP will still be counted, but not shown beyond the cap in the level command - -GIF_LIMITER: # Limits the amount of gifs a user can send in a channel - RECENT_GIF_AGE: 60 - - GIF_LIMIT_EXCLUDE: - - 123456789012345 - - GIF_LIMITS_USER: - "123456789012345": 2 - GIF_LIMITS_CHANNEL: - "123456789012345": 3 - -# If you do not have an IRC bridge running, ignore these options -# Allows messages from these webhooks to use only the $s and $snippet commands (for now) -IRC: - BRIDGE_WEBHOOK_IDS: - - 123456789012345679 - - 123456789012345679 - - 123456789012345679 diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml deleted file mode 100644 index 9fcd28451..000000000 --- a/docker-compose.dev.yml +++ /dev/null @@ -1,284 +0,0 @@ -# 
============================================================================== -# SERVICES CONFIGURATION - DEVELOPMENT ENVIRONMENT -# ============================================================================== -services: - # ============================================================================ - # TUX BOT SERVICE - Development Container - # ============================================================================ - # Purpose: Runs the Tux Discord bot in development mode with live reloading - # Features: Code synchronization, automatic rebuilds, development tools - # Performance: Higher resource limits for development workloads - # ============================================================================ - tux: - # CONTAINER IDENTIFICATION - # Development-specific name to avoid conflicts with production containers - # Clearly identifies this as a development instance - container_name: tux-dev - - # IMAGE CONFIGURATION - # Uses local development image built from dev stage of Dockerfile - # Contains development tools, debugging utilities, and additional packages - image: tux:dev - - # BUILD CONFIGURATION - # Always builds from local source for development - # Uses development target with full tooling and debugging capabilities - build: - # Build context includes entire project directory - context: . - # Dockerfile location (standard) - dockerfile: Dockerfile - # Target development stage with debugging tools and dev dependencies - target: dev - - # DEVELOPMENT OVERRIDE COMMAND - # Skip prisma generate in CMD to avoid read-only filesystem issues - # Can be run manually after container starts - command: - - sh - - -c - - exec poetry run tux --dev start - - # DEVELOPMENT WORKFLOW CONFIGURATION - # Docker BuildKit watch feature for live development - # Provides real-time code synchronization and intelligent rebuilds - develop: - # WATCH CONFIGURATION - # Monitors filesystem changes and syncs/rebuilds as appropriate - # Optimizes development workflow with minimal container restarts - watch: - # FILE SYNCHRONIZATION (Hot Reload) - # Syncs code changes without rebuilding the container - # Fastest feedback loop for code changes - - action: sync - # Watch entire project directory - path: . 
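-          # NOTE: this watch section only takes effect when the stack is started
-          # with "docker compose -f docker-compose.dev.yml watch" (or "up --watch"
-          # on recent Compose releases); a plain "up" ignores it.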
- # Sync to app directory in container - target: /app/ - # IGNORE PATTERNS - # Excludes files that don't need syncing or would cause issues - # Performance optimization to reduce sync overhead - ignore: - # Cache directories (not needed in sync) - - .cache/ - # IDE configurations (not needed in container) - - .idea/ - # Virtual environment (managed by container) - - .venv/ - # Editor configurations (not needed in container) - - .vscode/ - # Python cache files (regenerated automatically) - - '**/__pycache__/' - - '**/*.pyc' - # Log files (not needed in sync) - - '*.log' - # Editor temporary files - - '*.swp' - - .*.swp - - '*~' - - # DEPENDENCY REBUILD TRIGGERS - # Files that require full container rebuild when changed - # These changes affect the environment setup and need fresh build - - # Python dependencies changed - rebuild required - - action: rebuild - path: pyproject.toml - - # Lock file updated - rebuild required for dependency consistency - - action: rebuild - path: poetry.lock - - # Database schema changes - rebuild required for Prisma client generation - - action: rebuild - path: prisma/schema/ - - # VOLUME MOUNTS - # Development-specific volumes with different naming to avoid production conflicts - # Focuses on persistence of development data without read-only restrictions - volumes: - # DEVELOPMENT CACHE VOLUME - # Separate cache volume for development to avoid conflicts with production - # Contains development-specific cache data and temporary files - - tux_dev_cache:/app/.cache - - # DEVELOPMENT TEMPORARY VOLUME - # Separate temporary volume for development work - # Used for development artifacts, debugging files, etc. - - tux_dev_temp:/app/temp - - # USER HOME VOLUME - # Single volume for all user cache/config directories (.cache, .npm, etc.) - # Prevents read-only filesystem errors and covers all CLI tools - - tux_dev_user_home:/home/nonroot - - # ENVIRONMENT CONFIGURATION - # Environment variables loaded from .env file - # Same as production but may contain different values for development - # DEVELOPMENT: May include debug flags, development database URLs, etc. - env_file: - - .env - - # RESTART POLICY - # Automatic restart for development convenience - # Helps maintain development environment during crashes and testing - restart: unless-stopped - - # RESOURCE MANAGEMENT - # Higher resource limits for development workloads - # Development often requires more resources for compilation, debugging, etc. 
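-    # (The limits below can be sanity-checked at runtime with "docker stats tux-dev",
-    # which shows live memory/CPU usage against these caps.)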
- deploy: - resources: - # RESOURCE LIMITS (Development) - # Higher limits to accommodate development tools and processes - limits: - memory: 1g # Maximum 1GB RAM (double production) - cpus: '1.0' # Maximum 1 full CPU core (double production) - - # RESOURCE RESERVATIONS (Development) - # Higher reservations for better development performance - reservations: - memory: 512m # Guaranteed 512MB RAM (double production) - cpus: '0.5' # Guaranteed 0.5 CPU cores (double production) - - # LOGGING CONFIGURATION - # Same logging setup as production for consistency - # Helps developers understand production logging behavior - logging: - # JSON structured logging for development log analysis - driver: json-file - - # Log rotation to prevent development disk space issues - options: - max-size: 10m # Rotate logs when they reach 10MB - max-file: '3' # Keep maximum 3 rotated log files -# ============================================================================== -# VOLUMES CONFIGURATION - DEVELOPMENT ENVIRONMENT -# ============================================================================== -# Development-specific named volumes to avoid conflicts with production -# These volumes are isolated from production and can be safely removed -# for clean development environment resets -# ============================================================================== -volumes: - # DEVELOPMENT CACHE VOLUME - # Stores development-specific cache data - # Contains: Development API cache, debug cache, test data, etc. - # Isolation: Completely separate from production cache - # Lifecycle: Can be reset anytime for clean development environment - tux_dev_cache: - driver: local # Local Docker volume driver (default) - - # DEVELOPMENT TEMPORARY VOLUME - # Stores development temporary files and artifacts - # Contains: Debug files, development logs, test artifacts, etc. - # Isolation: Separate from production temporary data - # Lifecycle: Safe to clear for clean development state - tux_dev_temp: - driver: local # Local Docker volume driver (default) - - # DEVELOPMENT USER HOME VOLUME - # Stores all user cache and config directories - # Contains: .cache (Prisma), .npm, .config, and other CLI tool data - # Isolation: Separate from production user data - # Lifecycle: Persistent to avoid re-downloading tools and cache - tux_dev_user_home: - driver: local # Local Docker volume driver (default) -# ============================================================================== -# DEVELOPMENT WORKFLOW BEST PRACTICES IMPLEMENTED -# ============================================================================== -# -# 1. LIVE DEVELOPMENT: -# - Real-time code synchronization with Docker BuildKit watch -# - Intelligent rebuild triggers for dependency changes -# - Optimized ignore patterns for performance -# - Hot reload for rapid iteration -# -# 2. DEVELOPMENT ISOLATION: -# - Separate container name and volumes from production -# - Development-specific image with debugging tools -# - Isolated environment that doesn't affect production -# -# 3. RESOURCE OPTIMIZATION: -# - Higher resource limits for development workloads -# - Adequate resources for compilation and debugging -# - Performance optimized for development tasks -# -# 4. WORKFLOW EFFICIENCY: -# - Automatic restart for development convenience -# - Easy shell access for debugging and development -# - Consistent logging with production for familiarity -# -# 5. 
DEPENDENCY MANAGEMENT: -# - Automatic rebuilds on dependency file changes -# - Schema change detection for database updates -# - Smart rebuild triggers to minimize wait time -# -# DEVELOPMENT WORKFLOW: -# --------------------- -# 1. Start development environment: -# docker-compose -f docker-compose.dev.yml up -# -# 2. Edit code - changes sync automatically -# (No restart needed for code changes) -# -# 3. Update dependencies in pyproject.toml: -# (Container rebuilds automatically) -# -# 4. Debug with shell access: -# docker-compose -f docker-compose.dev.yml exec tux bash -# -# 5. View logs: -# docker-compose -f docker-compose.dev.yml logs -f tux -# -# 6. Clean restart: -# docker-compose -f docker-compose.dev.yml down -# docker-compose -f docker-compose.dev.yml up --build -# -# ============================================================================== -# -# TUX CLI COMMANDS (Recommended): -# -------------------------------- -# Build: poetry run tux --dev docker build -# Start: poetry run tux --dev docker up [-d|--build] -# Logs: poetry run tux --dev docker logs -f -# Shell: poetry run tux --dev docker shell -# Stop: poetry run tux --dev docker down -# -# Development workflow (from host): -# poetry run tux --dev docker exec tux "tux dev lint" -# poetry run tux --dev docker exec tux "pytest" -# -# Database (from host): -# poetry run tux --dev docker exec tux "tux db push" -# poetry run tux --dev docker exec tux "tux db migrate --name " -# -# DEVELOPMENT COMMANDS: -# --------------------- -# Start development: -# docker-compose -f docker-compose.dev.yml up -# -# Start in background: -# docker-compose -f docker-compose.dev.yml up -d -# -# Force rebuild: -# docker-compose -f docker-compose.dev.yml up --build -# -# Shell access: -# docker-compose -f docker-compose.dev.yml exec tux bash -# -# Run linting: -# docker-compose -f docker-compose.dev.yml exec tux poetry run tux dev lint -# -# Run tests: -# docker-compose -f docker-compose.dev.yml exec tux poetry run pytest -# -# Database operations: -# docker-compose -f docker-compose.dev.yml exec tux poetry run tux --dev db push -# -# Stop development: -# docker-compose -f docker-compose.dev.yml down -# -# Clean reset (removes volumes): -# docker-compose -f docker-compose.dev.yml down -v -# -# ============================================================================== diff --git a/docker-compose.yml b/docker-compose.yml index c05a6997a..814d416d8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,243 +1,216 @@ -# ============================================================================== -# SERVICES CONFIGURATION -# ============================================================================== +--- +# Docker Compose configuration for Tux +# Simple self-hosted setup + services: - # ============================================================================ - # TUX BOT SERVICE - Main Application Container - # ============================================================================ - # Purpose: Runs the Tux Discord bot in production mode - # Security: Hardened with read-only filesystem and security options - # Monitoring: Health checks and structured logging enabled - # ============================================================================ - tux: - # CONTAINER IDENTIFICATION - # Fixed name for easier management and log identification - # Allows direct docker commands: docker logs tux, docker exec tux sh - container_name: tux + tux-postgres: + container_name: tux-postgres + hostname: tux-postgres + image: postgres:15-alpine 
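+    # NOTE: pinned to the 15.x major series; moving to a newer major version
+    # requires a dump/restore (or pg_upgrade), e.g.:
+    #   docker compose exec tux-postgres pg_dump -U tuxuser tuxdb > backup.sql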
+ restart: "no" + environment: + POSTGRES_DB: ${POSTGRES_DB:-tuxdb} + POSTGRES_USER: ${POSTGRES_USER:-tuxuser} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-ChangeThisToAStrongPassword123!} + POSTGRES_INITDB_ARGS: --encoding=UTF-8 --lc-collate=C --lc-ctype=C + ports: + - "${POSTGRES_PORT:-5432}:5432" + volumes: + - tux_postgres_data:/var/lib/postgresql/data + - ./docker/postgres/postgresql.conf:/etc/postgresql/postgresql.conf:ro + command: postgres -c config_file=/etc/postgresql/postgresql.conf + + # Enhanced logging configuration + logging: + driver: json-file + options: + max-size: "10m" + max-file: "3" + compress: "true" - # IMAGE CONFIGURATION - # Uses pre-built image from GitHub Container Registry for faster deployment - # Falls back to local build if image is not available in registry - image: ghcr.io/allthingslinux/tux:latest + healthcheck: + test: + - CMD-SHELL + - pg_isready -U ${POSTGRES_USER:-tuxuser} -d ${POSTGRES_DB:-tuxdb} -h localhost + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s - # BUILD CONFIGURATION - # Local build fallback when registry image is unavailable - # Uses production target for optimized, minimal image + tux: + container_name: tux + hostname: tux + image: ${TUX_IMAGE:-ghcr.io/allthingslinux/tux}:${TUX_IMAGE_TAG:-latest} build: - # Build context includes entire project directory context: . - # Dockerfile location (can be omitted if using default) dockerfile: Dockerfile - # Target production stage for minimal, secure image target: production - - # VOLUME MOUNTS - # Strategic mounting for configuration, code, and persistent data + args: + VERSION: ${VERSION:-dev} + GIT_SHA: ${GIT_SHA:-} + BUILD_DATE: ${BUILD_DATE:-} + DEVCONTAINER: ${DEVCONTAINER:-0} volumes: - # CONFIGURATION MOUNT (Read-Only) - # Bot configuration files - mounted read-only for security - # Changes require container restart to take effect - ./config:/app/config:ro - - # EXTENSIONS MOUNT (Read-Only) - # Bot extensions/plugins - mounted read-only for security - # Allows hot-reloading of extensions without full rebuild - - ./tux/extensions:/app/tux/extensions:ro - - # ASSETS MOUNT (Read-Only) - # Static assets like images, sounds, etc. - read-only for security - # Shared between development and production for consistency + - ./src/tux/plugins:/app/tux/plugins:ro - ./assets:/app/assets:ro - - # CACHE VOLUME (Read-Write, Persistent) - # Named volume for bot cache data (user data, API responses, etc.) 
- # Persists across container restarts for better performance + # Migration mount - always mounted, controlled by USE_LOCAL_MIGRATIONS env var + - ./src/tux/database/migrations:/app/tux/database/migrations:ro - tux_cache:/app/.cache - - # TEMPORARY FILES VOLUME (Read-Write, Persistent) - # Named volume for temporary files that need persistence - # Separate from system /tmp for better control and persistence - tux_temp:/app/temp - - # USER HOME VOLUME (Read-Write, Persistent) - # Named volume for all user cache/config directories - # Prevents read-only filesystem errors for all CLI operations - tux_user_home:/home/nonroot - - # ENVIRONMENT CONFIGURATION - # Environment variables loaded from .env file - # Contains sensitive data like bot tokens, API keys, database URLs - # SECURITY: .env file should be in .gitignore and properly secured env_file: - .env - - # RESTART POLICY - # Automatically restart container unless explicitly stopped - # Handles bot crashes, system reboots, and temporary failures - # Options: no, always, on-failure, unless-stopped + environment: + TUX_VERSION: ${VERSION:-dev} + # Development-specific overrides + DEBUG: ${DEBUG:-false} + # Migration control + USE_LOCAL_MIGRATIONS: ${USE_LOCAL_MIGRATIONS:-true} + FORCE_MIGRATE: ${FORCE_MIGRATE:-false} + # Startup configuration + MAX_STARTUP_ATTEMPTS: ${MAX_STARTUP_ATTEMPTS:-3} + STARTUP_DELAY: ${STARTUP_DELAY:-5} + # Database configuration for Docker + POSTGRES_HOST: tux-postgres + POSTGRES_PORT: 5432 + POSTGRES_DB: ${POSTGRES_DB:-tuxdb} + POSTGRES_USER: ${POSTGRES_USER:-tuxuser} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-ChangeThisToAStrongPassword123!} restart: unless-stopped - - # HEALTH CHECK CONFIGURATION - # Monitors container health for automatic restart and load balancer integration - # More sophisticated than Dockerfile health check for production monitoring + depends_on: + tux-postgres: + condition: service_healthy healthcheck: - # Simple Python import test to verify bot can start - # Lighter than full bot initialization for faster health checks test: - CMD - python - -c - - import sys; sys.exit(0) - - # Health check timing configuration - interval: 30s # Check every 30 seconds - timeout: 10s # Allow 10 seconds for check to complete - retries: 3 # Mark unhealthy after 3 consecutive failures - start_period: 40s # Wait 40 seconds before first check (startup time) - - # RESOURCE MANAGEMENT - # Production resource limits and reservations for stable operation - # Prevents bot from consuming excessive resources and affecting other services - deploy: - resources: - # RESOURCE LIMITS (Hard Caps) - # Container will be killed if it exceeds these limits - limits: - memory: 512M # Maximum 512MB RAM usage - cpus: '0.5' # Maximum 0.5 CPU cores (50% of one core) - - # RESOURCE RESERVATIONS (Guaranteed Resources) - # Docker ensures these resources are always available to the container - reservations: - memory: 256M # Guaranteed 256MB RAM - cpus: '0.25' # Guaranteed 0.25 CPU cores (25% of one core) - - # SECURITY HARDENING - # Additional security options for production deployment + - | + import sys + try: + import tux.shared.config.env + # Additional check: ensure bot token is configured + from tux.shared.config.env import CONFIG + if not CONFIG.bot_token: + print("Bot token not configured", file=sys.stderr) + sys.exit(1) + print("Health check passed") + except Exception as e: + print(f"Health check failed: {e}", file=sys.stderr) + sys.exit(1) + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + # Production: Stop 
the container gracefully; the bot gets up to 30s to shut down before SIGKILL
+    stop_grace_period: 30s
     security_opt:
-      # Prevents container from gaining new privileges during execution
-      # Protects against privilege escalation attacks
       - no-new-privileges:true
-
-    # READ-ONLY FILESYSTEM
-    # Makes the root filesystem read-only for enhanced security
-    # Prevents malicious code from modifying system files
-    # Writable areas provided via tmpfs mounts below
     read_only: true
-
-    # TEMPORARY FILESYSTEM MOUNTS
-    # Provides writable areas for system operations while maintaining security
-    # These are ephemeral and cleared on container restart
     tmpfs:
-      # Standard temporary directory with size limit
       - /tmp:size=100m
-
-      # Variable temporary directory with smaller size limit
       - /var/tmp:size=50m
-
-    # LOGGING CONFIGURATION
-    # Structured logging for production monitoring and debugging
-    # Prevents log files from consuming excessive disk space
     logging:
-      # JSON structured logging for better parsing by log aggregators
       driver: json-file
-
-      # Log rotation configuration to prevent disk space issues
       options:
-        max-size: 10m # Rotate logs when they reach 10MB
-        max-file: '3' # Keep maximum 3 rotated log files
-# ==============================================================================
-# VOLUMES CONFIGURATION
-# ==============================================================================
-# Named volumes for persistent data that survives container restarts
-# These volumes are managed by Docker and provide better performance
-# and portability compared to bind mounts for application data
-# ==============================================================================
+        max-size: 10m
+        max-file: '3'
+
+  # Development mode with hot reload (only active when using --watch)
+  develop:
+    watch:
+      # Sync Python source code changes for hot reload
+      - action: sync
+        path: ./src
+        target: /app/src
+        ignore:
+          - __pycache__/
+          - "*.pyc"
+          - "*.pyo"
+          - "*.pyd"
+          - .pytest_cache/
+          - .mypy_cache/
+          - .coverage
+      # Sync configuration changes
+      - action: sync
+        path: ./config
+        target: /app/config
+      # Sync custom modules
+      - action: sync
+        path: ./src/tux/plugins
+        target: /app/tux/plugins
+      # Sync assets
+      - action: sync
+        path: ./assets
+        target: /app/assets
+      # Rebuild when dependencies change
+      - action: rebuild
+        path: pyproject.toml
+      - action: rebuild
+        path: uv.lock
+      # Restart when environment or Docker config changes
+      - action: sync+restart
+        path: .env
+        target: /app/.env
+      - action: sync+restart
+        path: docker-compose.yml
+        target: /app/docker-compose.yml
+
+  tux-adminer:
+    image: adminer:latest
+    container_name: tux-adminer
+    hostname: tux-adminer
+    restart: "no"
+    depends_on:
+      tux-postgres:
+        condition: service_healthy
+
+    # Port mapping
+    ports:
+      - '${ADMINER_PORT:-8081}:8080'
+
+    # Adminer configuration
+    environment:
+      ADMINER_DEFAULT_DRIVER: "pgsql"
+      ADMINER_DEFAULT_SERVER: "tux-postgres"
+      ADMINER_DEFAULT_DB: ${POSTGRES_DB:-tuxdb}
+      ADMINER_DEFAULT_USERNAME: ${POSTGRES_USER:-tuxuser}
+      ADMINER_DEFAULT_PASSWORD: ${POSTGRES_PASSWORD:-ChangeThisToAStrongPassword123!}
+      ADMINER_AUTO_LOGIN: "${ADMINER_AUTO_LOGIN:-true}"
+      ADMINER_PLUGINS: "backward-keys tables-filter dump-date dump-json dump-xml dump-zip edit-calendar enum-option foreign-system json-column pretty-json-column table-indexes-structure table-structure"
+
+    configs:
+      - source: adminer-index.php
+        target: /var/www/html/index.php
+      - source: adminer-theme.css
+        target: /var/www/html/adminer.css
+
+    # Enhanced logging configuration
+    logging:
+      driver: json-file
+      options:
+ max-size: "10m" + max-file: "3" + compress: "true" + + # Security configuration + security_opt: + - no-new-privileges:true + volumes: - # BOT CACHE VOLUME - # Stores bot cache data for improved performance across restarts - # Contains: Discord API cache, user data cache, command cache, etc. - # Persistence: Survives container restarts and updates - # Size: Grows based on bot usage, monitor in production + # Persistent data volumes tux_cache: - driver: local # Local Docker volume driver (default) - - # TEMPORARY FILES VOLUME - # Stores temporary files that need persistence across container restarts - # Contains: Downloaded files, processing artifacts, session data, etc. - # Persistence: Survives container restarts but can be cleared if needed - # Size: Should be monitored and cleaned periodically in production + driver: local tux_temp: - driver: local # Local Docker volume driver (default) - - # USER HOME VOLUME - # Stores all user cache and config directories - # Contains: .cache (Prisma), .npm, .config, and other CLI tool data - # Persistence: Critical for avoiding re-downloads and CLI performance - # Size: Relatively small but covers all user-space tool requirements + driver: local tux_user_home: - driver: local # Local Docker volume driver (default) -# ============================================================================== -# PRODUCTION DEPLOYMENT BEST PRACTICES IMPLEMENTED -# ============================================================================== -# -# 1. SECURITY HARDENING: -# - Read-only root filesystem with tmpfs for writable areas -# - No new privileges security option -# - Non-root user execution (configured in Dockerfile) -# - Read-only mounts for configuration and code -# -# 2. RESOURCE MANAGEMENT: -# - Memory and CPU limits to prevent resource exhaustion -# - Resource reservations to ensure minimum performance -# - Restart policy for automatic recovery -# -# 3. MONITORING & OBSERVABILITY: -# - Health checks for container health monitoring -# - Structured JSON logging for log aggregation -# - Log rotation to prevent disk space issues -# - Fixed container name for easier management -# -# 4. DATA PERSISTENCE: -# - Named volumes for cache and temporary data -# - Proper separation of read-only and read-write data -# - Volume organization for backup and maintenance -# -# 5. 
OPERATIONAL EXCELLENCE:
-#    - Clear restart policy for reliability
-#    - Environment file separation for security
-#    - Build fallback for deployment flexibility
-#    - Registry image for faster deployments
-#
-# ==============================================================================
-#
-# TUX CLI COMMANDS (Recommended):
-# --------------------------------
-# Build:    poetry run tux --prod docker build
-# Start:    poetry run tux --prod docker up [-d|--build]
-# Logs:     poetry run tux --prod docker logs -f
-# Shell:    poetry run tux --prod docker shell
-# Stop:     poetry run tux --prod docker down
-# Database: poetry run tux --prod docker exec tux "tux db "
-#
-# PRODUCTION COMMANDS:
-# --------------------
-# Production deployment:
-#   docker-compose up -d
-#
-# View logs:
-#   docker-compose logs -f tux
-#
-# Update bot:
-#   docker-compose pull && docker-compose up -d
-#
-# Rebuild from source:
-#   docker-compose up -d --build
-#
-# Stop bot:
-#   docker-compose down
-#
-# Stop and remove volumes (WARNING: destroys cache):
-#   docker-compose down -v
-#
-# ==============================================================================
+    driver: local
+  tux_postgres_data:
+    driver: local
+
+configs:
+  adminer-index.php:
+    file: ./docker/adminer/index.php
+  adminer-theme.css:
+    file: ./docker/adminer/adminer-theme.css
diff --git a/docker/adminer/adminer-theme.css b/docker/adminer/adminer-theme.css
new file mode 100644
index 000000000..72df2e4c2
--- /dev/null
+++ b/docker/adminer/adminer-theme.css
@@ -0,0 +1,749 @@
+/*
+ * Theme by Douglas Damasio [http://github.com/douglasdamasio]
+ * Based on Pepa Linha
+ * Color syntax inspired by Dracula Theme [https://draculatheme.com/]
+ * @version 1.0 (June 2020)
+ */
+@import url('https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@300;400;600&family=Ubuntu:wght@500;700&display=swap');
+
+html {
+  --bg: #282a36;
+  --fg: #f8f8f2;
+}
+
+:root {
+  --color-darkBackground: #282a36;
+  --color-darkCurrentLine: #44475a;
+  --color-darkForeground: #f8f8f2;
+  --color-darkComment: #6272a4;
+  --color-darkCyan: #8be9fd;
+  --color-darkGreen: #50fa7b;
+  --color-darkOrange: #ffb86c;
+  --color-darkPink: #ff79c6;
+  --color-darkPurple: #bd93f9;
+  --color-darkRed: #ff5555;
+  --color-darkYellow: #f1fa8c;
+
+  --color-darkTitleSite: #bcc2cd;
+  --color-darkDraculaSite: #383a59;
+  --color-darkDraculaVSCode: #22222c;
+}
+
+html,
+body {
+  height: 100%;
+  background: var(--color-darkBackground);
+}
+
+body {
+  font-family: 'Source Sans Pro', sans-serif;
+}
+
+div {
+  color: var(--color-darkForeground);
+}
+
+h1 {
+  margin: 0;
+  padding: 0;
+  background: none;
+  border: 0;
+}
+
+h2 {
+  margin: 0;
+  padding: 0;
+  background: none;
+  border: 0;
+  font-family: 'Ubuntu', sans-serif;
+  font-size: 32px;
+  font-weight: 700;
+  color: var(--color-darkPink);
+  border-bottom: 1px solid var(--color-darkComment);
+  padding-bottom: 6px;
+  margin-bottom: 40px;
+}
+
+.rtl h2 {
+  margin: 0;
+  margin-bottom: 40px;
+}
+
+h3 {
+  font-size: 22px;
+  margin: 0 0 10px;
+  border-bottom: 1px solid var(--color-darkComment);
+  padding: 0;
+  padding-bottom: 6px;
+  color: var(--color-darkGreen);
+}
+
+p {
+  margin: 0;
+  margin-bottom: 15px;
+  align-items: center;
+  color: var(--color-darkForeground);
+}
+
+a {
+  color: var(--color-darkPink);
+  text-decoration: none;
+}
+
+a:visited {
+  color: var(--color-darkPurple);
+}
+
+a:link:hover,
+a:visited:hover {
+  color: var(--color-darkPink);
+  text-decoration: underline;
+}
+
+a[href*=charsets] {
+  display: none;
+}
+
+table {
+  border: 0;
+  margin: 0;
+  margin-top: 
15px; +} + +th, +td { + border: 0; + padding: 6px; + color: var(--color-darkOrange); +} + +th { + background: none; + color: var(--color-darkCyan); + font-weight: normal; +} + +tbody tr:hover td, +tbody tr:hover th { + background: var(--color-darkCurrentLine); +} + +table:not(.checkable) th { + min-width: 120px; +} + +#edit-fields th { + min-width: 0; +} + +thead td, +thead th { + background: var(--color-darkComment); +} + +thead td, +thead td a, +thead td a:link:hover, +thead td a:visited, +thead td a:visited:hover, +thead th, +thead th a, +thead th a:link:hover, +thead th a:visited, +thead th a:visited:hover { + color: var(--color-darkForeground); +} + +table.checkable, +p.links + table, +pre + table, +#edit-fields, +p + table, +h3 + table, +.scrollable table { + border: 1px solid var(--color-darkCurrentLine); + margin-bottom: 15px; +} + +table.checkable tbody tr:hover td, +table.checkable tbody tr:hover th { + background: var(--color-darkCurrentLine); +} + +.js .checkable .checked td, +.js .checkable .checked th { + background: var(--color-darkDraculaSite); +} + +.js .checkable thead .checked td, +.js .checkable thead .checked th { + background: var(--color-darkPurple); +} + +.odds tbody tr:nth-child(2n) { + background: var(--color-darkDraculaVSCode); +} + +fieldset { + display: inline-block; + padding: 15px; + padding-top: 5px; + margin: 0 0 15px; + border: 0; + background: var(--color-darkBackground); +} + +fieldset select { + margin-right: 5px; +} + +fieldset input[type=button], +fieldset input[type=submit], +fieldset p { + margin-bottom: 0; +} + +fieldset div p { + margin-top: 10px; +} + +legend { + display: inline-block; + padding: 6px 15px; + margin: 0 0 0 -15px; + background: var(--color-darkDraculaSite); + font-family: 'Source Sans Pro', sans-serif; + color: var(--color-darkOrange); +} + +legend a, +legend a:link:hover { + color: var(--color-darkOrange); + text-decoration: underline; +} + +code { + background: none; +} + +p code, +pre code, +pre[contenteditable=true] { + padding: 10px 15px; + display: block; + font-size: 17px; + margin-bottom: 15px; +} + +p code + a, +p code + a:link:hover, +p code + a:visited:hover { + margin-left: 15px; + position: relative; + top: -20px; + color: var(--color-darkOrange); + font-size: 12px; + text-decoration: underline; + text-transform: lowercase; +} + +#content { + margin: 0; + margin-left: 400px; + margin-right: 54px; + padding: 0; + padding-top: 50px; +} + +#content > p { + margin-bottom: 15px; + color: var(--color-darkForeground); +} + +.rtl #content { + margin: 0; + margin-left: 54px; + margin-right: 400px; + padding: 0; + padding-top: 50px; +} + +#menu { + width: 347px; + border-right: 1px solid var(--color-darkBackground); + box-shadow: inset -1px 0 0 #000000b4; + margin: 0; + padding: 0; + top: 0; + background: var(--color-darkDraculaVSCode); + bottom: 0; + position: fixed; + padding: 0 15px; + box-sizing: border-box; +} + +#menu h1 { + line-height: 50px; + margin: 10px 0; +} + +#menu h1 a { + font-style: normal; +} + +#menu h1 .version { + color: var(--color-darkPurple); +} + +#menu a { + color: var(--color-darkForeground); +} + +#menu p, +#tables { + border: 0; + padding: 0; +} + +#menu #dbs { + background: var(--color-darkDraculaVSCode); + padding: 10px 15px 15px; + border: 1px solid var(--color-darkForeground); + border-bottom: 0; + box-sizing: border-box; + color: var(--color-darkCyan); +} + +#menu #dbs select { + outline: 0; + border-color: var(--color-darkComment); + width: 100%; +} + +#menu p.links { + margin: 0 0 15px; + 
border: 1px solid var(--color-darkForeground); + border-top: 0; + text-align: center; + display: table; + width: 100%; + box-sizing: border-box; +} + +#menu p.links a { + padding: 8px; + margin: 0; + display: table-cell; + font-size: 12px; +} + +#menu p.links a:hover { + color: var(--color-darkPink); +} + +#menu p.links a.active { + font-weight: normal; + background: var(--color-darkCurrentLine); + color: var(--color-darkYellow); +} + +.tables-filter { + margin-top: 32px; + padding: 0; +} + +#content p.links { + margin: -10px 0 15px; +} + +#content p.links a { + padding: 8px; + margin: 0; + display: table-cell; + border: 1px solid var(--color-darkBackground); +} + +#content p.links a, +#content p.links a:visited, +#content p.links a:hover { + color: var(--color-darkCyan); +} + +#content p.links a.active { + font-weight: normal; + border: 1px solid var(--color-darkTitleSite); + background: var(--color-darkCurrentLine); +} + +#tables { + max-height: 100%; + margin: 15px -15px 32px !important; + position: absolute; + left: 15px; + right: 15px; + bottom: 0; + top: 220px; + overflow: hidden !important; + overflow-y: auto !important; +} + +.rtl #tables { + overflow: hidden !important; + overflow-y: auto !important; +} + +#tables a { + float: right; + padding: 6px 15px; +} + +.rtl #tables a { + float: none; +} + +#tables .structure, +#tables .view { + float: none; + display: block; +} + +.rtl #tables a:first-child, +.rtl #tables br + a { + float: left; + display: block; + margin-left: 15px; +} + +#tables a:hover, +#tables a:hover + a, +#tables a.active, +#tables a.active + a { + background: var(--color-darkBackground); + color: var(--color-darkPink); +} + +#tables br { + display: none; +} + +.js .column { + background: var(--color-darkDraculaVSCode); +} + +.js .checked .column { + background: var(--color-darkDraculaVSCode); +} + +.pages { + left: 400px; + background: var(--color-darkCyan); + color: var(--color-darkBackground); + font-weight: bold; + border: 0; + display: inline-block; + position: static; +} + +.pages a, +.pages a:link, +.pages a:link:hover, +.pages a:visited, +.pages a:visited:hover { + color: var(--color-darkBackground); + font-weight: normal; +} + +#breadcrumb { + margin: 0; + left: 400px; + background: none; + padding: 0; + padding-top: 25px; + font-size: 12px; +} + +#breadcrumb a { + color: var(--color-darkForeground); + text-decoration: underline; +} + +#breadcrumb, +#breadcrumb a:hover { + color: var(--color-darkTitleSite); +} + +.rtl #breadcrumb { + margin: 0; + padding: 0; + padding-top: 25px; + right: 400px; +} + +.logout, +.rtl .logout { + top: 20px; + right: 54px; + margin: 0; +} + +.rtl .logout { + right: auto; + left: 54px; +} + +#logout { + margin-top: 0; +} + +pre.jush, +input:not([type]), +input[type="color"], +input[type="email"], +input[type="number"], +input[type="password"], +input[type="tel"], +input[type="url"], +input[type="text"], +input[type="search"] { + border: 1px solid var(--color-darkCurrentLine); + background-color: var(--color-darkBackground); + padding: 6px; + margin: 0; + box-sizing: border-box; + color: var(--color-darkForeground); +} + +input::placeholder { + color: var(--color-darkForeground); +} + +table:not(#table) input:not([type]), +table:not(#table) input[type="color"], +table:not(#table) input[type="email"], +table:not(#table) input[type="number"], +table:not(#table) input[type="password"], +table:not(#table) input[type="tel"], +table:not(#table) input[type="url"], +table:not(#table) input[type="text"], +table:not(#table) 
input[type="search"] { + min-width: 280px; +} + +input[type=submit], +input[type=button] { + border: 0; + padding: 7px 12px; + cursor: pointer; + outline: 0; + box-shadow: none; + background: var(--color-darkGreen); + color: var(--color-darkBackground); + font-weight: bold; + margin-bottom: 5px; + transition: background .4s ease; + border-radius: 5px; + margin-top: 20px; +} + +input[type=submit][disabled], +input[type=button][disabled] { + background: var(--color-darkTitleSite) !important; + color: var(--color-darkBackground); + cursor: not-allowed; +} + +input[type=submit]:hover, +input[type=button]:hover, +input[type=submit]:focus, +input[type=button]:focus { + background: var(--color-darkGreen); + opacity: 0.8; +} + +.logout input[type=submit] { + background: var(--color-darkRed); + color: var(--color-darkForeground); +} + +.logout input[type=submit]:hover { + background: var(--color-darkRed); + opacity: 0.8; +} + +input.default, +input.default { + box-shadow: none; + background: var(--color-darkGreen); + color: var(--color-darkDraculaVSCode); + font-weight: bold; +} + +select { + box-sizing: border-box; + margin: 0; + padding: 6px; + border: 1px solid var(--color-darkCurrentLine); + background-color: var(--color-darkBackground); + color: var(--color-darkForeground); +} + +label { + cursor: pointer; + margin: 18px; + color: var(--color-darkOrange); +} + +.error, +.message { + margin: 0; + margin-bottom: 15px; + background: var(--color-darkCurrentLine); + color: var(--color-darkRed); +} + +#logins a, +#tables a, +#tables span { + background: none; +} + +#form > p { + margin-bottom: 15px; + color: var(--color-darkForeground); +} + + +#schema .table { + padding: 6px; +} + +#schema .table a { + display: block; + margin: -6px; + margin-bottom: 6px; + padding: 6px; + color: var(--color-darkBackground); + background: var(--color-darkPurple); +} + +#schema .table br { + display: none; +} + +#schema .table span { + display: block; + margin-bottom: 1px solid var(--color-darkDraculaVSCode); +} + +#lang { + position: fixed; + top: 30px; + right: calc(100% + 8px); + z-index: 10; + margin-right: -340px; + line-height: normal; + padding: 0; + left: auto; + font-size: 0; +} + +#lang select { + font-size: 12px; + padding: 0; + text-align: right; + border: 0; + background: none; + -webkit-appearance: none; + -moz-appearance: none; + appearance: none; + cursor: pointer; + outline: 0; +} + +#lang select option { + text-align: right; +} + +.rtl #lang { + margin-right: 0; + left: 100%; + margin-left: -261px; + right: auto; +} + +.jush { + color: var(--color-darkForeground); +} + +.jush a { + color: var(--color-darkPurple); +} + +.jush-sql a, +.jush-sql_code a, +.jush-sqlite a, +.jush-pgsql a, +.jush-mssql a, +.jush-oracle a, +.jush-simpledb a { + font-weight: normal; +} + +.jush-bac, +.jush-php_bac, +.jush-bra, +.jush-mssql_bra, +.jush-sqlite_quo { + color: var(--color-darkYellow); +} + +.jush-php_quo, +.jush-quo, +.jush-quo_one, +.jush-php_eot, +.jush-apo, +.jush-sql_apo, +.jush-sqlite_apo, +.jush-sql_quo, +.jush-sql_eot { + color: var(--color-darkOrange); +} + +.jush-num, +.jush-clr { + color: var(--color-darkPurple); +} + +@media print { + .logout { + display: none; + } + + #breadcrumb { + position: static; + } + + #content { + margin: 0; + } +} + +.footer { + position: sticky; + bottom: 0; + margin-right: -20px; + border-top: 20px solid var(--color-darkBackground); + border-image: var(--color-darkBackground) 100% 0; + border-image-source: var(--color-darkBackground); + border-image-slice: 100% 0; + 
+  border-image-width: 1;
+  border-image-outset: 0;
+  border-image-repeat: stretch;
+}
+
+.footer > div {
+  background: var(--color-darkBackground);
+  padding: 0 0 .5em;
+}
diff --git a/docker/adminer/index.php b/docker/adminer/index.php
new file mode 100644
index 000000000..8e2a171b5
--- /dev/null
+++ b/docker/adminer/index.php
@@ -0,0 +1,23 @@
+<?php
+
+// Auto-login for Adminer: when ADMINER_AUTO_LOGIN is enabled, pre-fill the
+// login form from the ADMINER_DEFAULT_* environment variables defined in
+// docker-compose.yml. Adminer treats a populated $_POST['auth'] as a
+// submitted login form.
+if (getenv('ADMINER_AUTO_LOGIN') === 'true' && empty($_POST['auth'])) {
+    $_POST['auth'] = [
+        'server' => getenv('ADMINER_DEFAULT_SERVER') ?: 'tux-postgres',
+        'username' => getenv('ADMINER_DEFAULT_USERNAME') ?: 'tuxuser',
+        'password' => getenv('ADMINER_DEFAULT_PASSWORD') ?: 'ChangeThisToAStrongPassword123!',
+        'driver' => getenv('ADMINER_DEFAULT_DRIVER') ?: 'pgsql',
+        'db' => getenv('ADMINER_DEFAULT_DB') ?: 'tuxdb',
+    ];
+}
+
+// Include the main Adminer application
+include './adminer.php';
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
new file mode 100755
index 000000000..44263a625
--- /dev/null
+++ b/docker/entrypoint.sh
@@ -0,0 +1,154 @@
+#!/bin/bash
+set -e
+
+echo "🐧 Tux Docker Entrypoint"
+echo "========================"
+
+# Configuration
+MAX_STARTUP_ATTEMPTS=${MAX_STARTUP_ATTEMPTS:-3}
+STARTUP_DELAY=${STARTUP_DELAY:-5}
+
+# Function to check if database is ready (simple socket check)
+wait_for_db() {
+    echo "⏳ Waiting for database to be ready..."
+    local attempts=0
+    local max_attempts=30
+
+    until python -c "
+import socket
+import sys
+try:
+    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    sock.settimeout(1)
+    result = sock.connect_ex(('$POSTGRES_HOST', $POSTGRES_PORT))
+    sock.close()
+    sys.exit(0 if result == 0 else 1)
+except Exception:
+    sys.exit(1)
+"; do
+        attempts=$((attempts + 1))
+        if [ $attempts -ge $max_attempts ]; then
+            echo "❌ Database connection timeout after $max_attempts attempts"
+            exit 1
+        fi
+        echo "Database is unavailable - sleeping (attempt $attempts/$max_attempts)"
+        sleep 2
+    done
+    echo "✅ Database is ready!"
+}
+
+# Function to handle migrations
+handle_migrations() {
+    echo "🔄 Handling database migrations..."
+
+    # Change to the app directory where alembic.ini is located
+    cd /app
+
+    # Check if we need to force migration
+    if [ "$FORCE_MIGRATE" = "true" ]; then
+        echo "⚠️ WARNING: Force migration can cause data inconsistency!"
+        echo "🔧 Force migrating database to head..."
+        python -m alembic stamp head
+        echo "✅ Database force migrated to head"
+    else
+        # Try normal migration
+        echo "🔄 Running normal migrations..."
+        if ! python -m alembic upgrade head; then
+            echo "⚠️ Migration failed, attempting to fix..."
+            echo "📊 Current migration status:"
+            python -m alembic current
+            echo "🔧 Attempting to stamp database as head..."
+            python -m alembic stamp head
+            echo "✅ Database stamped as head"
+        else
+            echo "✅ Migrations completed successfully"
+        fi
+    fi
+}
+
+# Function to validate configuration
+validate_config() {
+    echo "🔍 Validating configuration..."
+
+    # Check for required environment variables
+    if [ -z "$BOT_TOKEN" ]; then
+        echo "❌ BOT_TOKEN is not set"
+        return 1
+    fi
+
+    # Test configuration loading
+    if ! python -c "import tux.shared.config.settings; print('✅ Configuration loaded successfully')"; then
+        echo "❌ Failed to load configuration"
+        return 1
+    fi
+
+    echo "✅ Configuration validation passed"
+    return 0
+}
+
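+# (For reference, the same Alembic steps can be run by hand from a shell in the
+# container, e.g.:
+#   docker compose exec tux python -m alembic current
+#   docker compose exec tux python -m alembic upgrade head
+# )
+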
+# Function to start the bot with retry logic
+start_bot_with_retry() {
+    local attempts=0
+
+    while [ $attempts -lt $MAX_STARTUP_ATTEMPTS ]; do
+        attempts=$((attempts + 1))
+        echo "🚀 Starting Tux bot (attempt $attempts/$MAX_STARTUP_ATTEMPTS)..."
+
+        # Validate configuration before starting
+        if ! validate_config; then
+            echo "❌ Configuration validation failed"
+            if [ $attempts -ge $MAX_STARTUP_ATTEMPTS ]; then
+                echo "🛑 Maximum startup attempts reached. Exiting."
+                exit 1
+            fi
+            echo "⏳ Waiting ${STARTUP_DELAY}s before retry..."
+            sleep $STARTUP_DELAY
+            continue
+        fi
+
+        # Start the bot in the background and wait on it, so the signal trap
+        # below can reach it via BOT_PID ("exec" here would replace this shell
+        # and make the retry logic below unreachable)
+        tux start &
+        BOT_PID=$!
+        if wait "$BOT_PID"; then
+            echo "✅ Bot exited cleanly"
+            return 0
+        else
+            echo "❌ Bot failed (exit code: $?)"
+            if [ $attempts -ge $MAX_STARTUP_ATTEMPTS ]; then
+                echo "🛑 Maximum startup attempts reached. Exiting."
+                exit 1
+            fi
+            echo "⏳ Waiting ${STARTUP_DELAY}s before retry..."
+            sleep $STARTUP_DELAY
+        fi
+    done
+}
+
+# Signal handlers for graceful shutdown
+cleanup() {
+    echo ""
+    echo "🛑 Received shutdown signal"
+    echo "🧹 Performing cleanup..."
+
+    # Kill any child processes
+    if [ -n "$BOT_PID" ]; then
+        echo "🔄 Stopping bot process (PID: $BOT_PID)..."
+        kill -TERM "$BOT_PID" 2>/dev/null || true
+        wait "$BOT_PID" 2>/dev/null || true
+    fi
+
+    echo "✅ Cleanup complete"
+    exit 0
+}
+
+# Set up signal handlers
+trap cleanup SIGTERM SIGINT
+
+# Main execution
+echo "⏳ Waiting for database to be ready..."
+wait_for_db
+
+echo "🔄 Handling database migrations..."
+handle_migrations
+
+# Start bot with retry logic and validation (always enabled)
+echo "🚀 Starting bot with smart orchestration..."
+start_bot_with_retry
diff --git a/docker/postgres/postgresql.conf b/docker/postgres/postgresql.conf
new file mode 100644
index 000000000..ccd7b98d6
--- /dev/null
+++ b/docker/postgres/postgresql.conf
@@ -0,0 +1,153 @@
+# =============================================================================
+# TUX POSTGRESQL CONFIGURATION
+# =============================================================================
+# Optimized PostgreSQL configuration for the Tux Discord bot
+# Based on professional XMPP server configurations
+# =============================================================================
+
+# =============================================================================
+# CONNECTION AND AUTHENTICATION
+# =============================================================================
+listen_addresses = '*'                  # Listen on all interfaces for Docker networking
+# Production: use 'localhost' or a specific IP
+# Development: use the Docker network range for container access
+max_connections = 100                   # Maximum concurrent connections
+superuser_reserved_connections = 3      # Reserved for superuser
+
+# =============================================================================
+# MEMORY CONFIGURATION
+# =============================================================================
+# Shared buffers: 25% of RAM for a dedicated database server
+shared_buffers = 256MB                  # Increased from default 128MB
+
+# Effective cache size: 75% of RAM
+effective_cache_size = 768MB            # Optimized for 1GB+ systems
+
+# Work memory: for complex queries and sorting
+work_mem = 16MB                         # Increased from default 4MB
+
+# Maintenance work memory: for VACUUM, ANALYZE, CREATE INDEX
+maintenance_work_mem = 128MB            # Increased from default 64MB
+
+# =============================================================================
+# WRITE-AHEAD LOG (WAL) CONFIGURATION
+# =============================================================================
+# Checkpoint completion target: spread checkpoints over time
+checkpoint_completion_target = 0.9      # Default: 0.9 (good)
+
+# WAL buffers: for transaction logs
+wal_buffers = 16MB                      # Increased from default 4MB
+
+# WAL file size
+max_wal_size = 1GB                      # Default: 1GB
+min_wal_size = 80MB                     # Default: 80MB
+
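+# (Applied values can be double-checked from psql once the container is up, e.g.:
+#   SELECT name, setting, unit FROM pg_settings
+#   WHERE name IN ('shared_buffers', 'work_mem', 'max_wal_size');
+# )
+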
+# =============================================================================
+# QUERY PLANNER CONFIGURATION
+# =============================================================================
+# Statistics target: more accurate query planning
+default_statistics_target = 100         # Default: 100 (good)
+
+# Random page cost: optimized for SSD storage
+random_page_cost = 1.1                  # Default: 4.0 (HDD), 1.1 (SSD)
+
+# Effective I/O concurrency: parallel I/O operations
+effective_io_concurrency = 200          # Default: 1, 200 for SSD
+
+# =============================================================================
+# AUTOVACUUM CONFIGURATION
+# =============================================================================
+# Enable autovacuum for automatic maintenance
+autovacuum = on                         # Default: on
+
+# Autovacuum thresholds
+autovacuum_vacuum_threshold = 50        # Default: 50
+autovacuum_analyze_threshold = 50       # Default: 50
+
+# Autovacuum scale factors
+autovacuum_vacuum_scale_factor = 0.2    # Default: 0.2
+autovacuum_analyze_scale_factor = 0.1   # Default: 0.1
+
+# Autovacuum work memory
+autovacuum_work_mem = 64MB              # Default: -1 (uses maintenance_work_mem)
+
+# =============================================================================
+# LOGGING CONFIGURATION
+# =============================================================================
+# Log level
+log_min_messages = warning              # Default: warning
+
+# Log statements
+log_min_duration_statement = 1000       # Log queries taking >1 second
+
+# Log connections and disconnections
+log_connections = on                    # Default: off
+log_disconnections = on                 # Default: off
+
+# Log autovacuum activity
+log_autovacuum_min_duration = 0         # Log all autovacuum activity
+
+# =============================================================================
+# PERFORMANCE MONITORING
+# =============================================================================
+# Enable statistics collection
+track_activities = on                   # Default: on
+track_counts = on                       # Default: on
+track_io_timing = on                    # Default: off (adds a small per-I/O timing overhead)
+
+# =============================================================================
+# SECURITY CONFIGURATION
+# =============================================================================
+# SSL configuration (disabled for development - no SSL settings at all)
+# ssl = off                             # Disable SSL for development
+# ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # Strong cipher suite (when SSL enabled)
+
+# Connection security
+tcp_keepalives_idle = 600               # TCP keepalive idle time
+tcp_keepalives_interval = 30            # TCP keepalive interval
+tcp_keepalives_count = 3                # TCP keepalive count
+
+# =============================================================================
+# LOCALE AND ENCODING
+# =============================================================================
+# Character encoding
+client_encoding = 'UTF8'                # Default: UTF8
+
+# Timezone
+timezone = 'UTC'                        # Default: GMT
+
+# Locale
+lc_messages = 'C'                       # Default: C
+lc_monetary = 'C'                       # Default: C
+lc_numeric = 'C'                        # Default: C
+lc_time = 'C'                           # Default: C
+
+# =============================================================================
+# DEVELOPMENT OPTIMIZATIONS
+# =============================================================================
+# Enable debug logging in development
+log_statement = 'all'                   # Log all statements (development only)
+log_line_prefix = '%t [%p]: [%l-1] user=%u,db=%d,app=%a,client=%h '
+
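+# (The statistics enabled above feed the pg_stat_* system views, e.g.:
+#   SELECT * FROM pg_stat_activity;      -- live connections and queries
+#   SELECT * FROM pg_stat_user_tables;   -- per-table access counts
+# )
+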
+# =============================================================================
+# TUX-SPECIFIC OPTIMIZATIONS
+# =============================================================================
+# Optimize for Discord bot workload
+# - High read/write ratio
+# - Frequent small transactions
+# - Moderate connection count
+
+# Connection pooling hints
+max_prepared_transactions = 0           # Disable two-phase commit (prepared transactions); the bot does not use it
+
+# Query optimization
+enable_seqscan = on                     # Allow sequential scans for small tables
+enable_indexscan = on                   # Enable index scans
+enable_bitmapscan = on                  # Enable bitmap scans
+enable_hashjoin = on                    # Enable hash joins
+enable_mergejoin = on                   # Enable merge joins
+enable_nestloop = on                    # Enable nested loop joins
+
+# =============================================================================
+# END OF CONFIGURATION
+# =============================================================================
diff --git a/docs/content/assets/stylesheets/extra.css b/docs/content/assets/stylesheets/extra.css
index d0381f5a0..b562c60b3 100644
--- a/docs/content/assets/stylesheets/extra.css
+++ b/docs/content/assets/stylesheets/extra.css
@@ -1,201 +1,582 @@
-/* Stretch content area */
+/* Smooth scrolling and scrollbar styling */
+html {
+  scroll-behavior: smooth;
+}
+
+/* Thin scrollbars */
+@supports (scrollbar-width: thin) {
+  html,
+  body {
+    scrollbar-width: thin; /* Firefox */
+  }
+}
+
+@supports (scrollbar-color: red) {
+  html,
+  body {
+    scrollbar-color: #565f89 transparent; /* Firefox - Tokyo Night muted */
+  }
+}
+
+::-webkit-scrollbar {
+  width: 4px;
+  height: 4px;
+}
+
+::-webkit-scrollbar-thumb {
+  background: #565f89; /* Tokyo Night muted */
+  border-radius: 2px;
+}
+
+::-webkit-scrollbar-track {
+  background: transparent;
+}
+
+.no-scrollbar {
+  scrollbar-width: none; /* Firefox */
+  -ms-overflow-style: none; /* IE and Edge */
+}
+
+.no-scrollbar::-webkit-scrollbar {
+  display: none; /* Chrome, Safari, Opera */
+}
+
+/* Modern Layout */
 .md-main__inner.md-grid {
-  /* Default 61rem */
-  max-width: 75rem;
+  max-width: 80rem;
 }
 
-/* More space at the bottom of the page. 
*/ .md-main__inner { - margin-bottom: 1.5rem; + margin-bottom: 2rem; } - -/* override md-content min-height */ .md-content { min-height: 100vh; } -/* Shrink header and footer to the content size*/ .md-grid { - /* Default 61rem */ - max-width: 50rem; + max-width: 72rem; } +/* Header styling */ .md-banner { - background: #11111B; - color: #fff; + background: #1a1b26; + color: #c0caf5; + border-bottom: 1px solid rgba(65, 72, 104, 0.5); } -.md-banner a { - color: inherit; - text-decoration: underline; - font-style: italic; +.md-header { + background: #1a1b26; + color: #c0caf5; + border-bottom: 1px solid rgba(65, 72, 104, 0.5); + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.3); } -.md-banner a:hover { - color: inherit; - text-decoration: none; +/* List formatting fixes */ +.md-typeset ul, +.md-typeset ol { + margin: 1rem 0; } -.md-banner__inner { - margin: 0 auto; +.md-typeset li { + margin: 0.25rem 0; + line-height: 1.6; } -/* header */ +.md-typeset li p { + margin: 0.25rem 0; +} -.md-header { - background: #11111B; - color: #fff; +.md-typeset ul ul, +.md-typeset ol ol, +.md-typeset ul ol, +.md-typeset ol ul { + margin: 0.5rem 0; } -.md-header a { - color: inherit; - text-decoration: underline; +/* Search bar styling */ +.md-search__form { + border-radius: 0.5rem; + background: rgba(65, 72, 104, 0.3); + border: 1px solid rgba(65, 72, 104, 0.5); } -.md-header a:hover { - color: inherit; - text-decoration: none; +.md-search__input { + border-radius: 0.5rem; + background: transparent; + color: #c0caf5; } -.md-header__inner { - margin: 0 auto; +.md-search__input::placeholder { + color: #9aa5ce; } .md-tabs { - background: #141420; - color: #fff; + background: #24283b; + color: #c0caf5; + border-bottom: 1px solid rgba(65, 72, 104, 0.5); } .md-tabs__link { - color: #fff; + color: #a9b1d6; + transition: color 0.15s ease; } .md-tabs__link:hover { - color: #fff; + color: #c0caf5; +} + +/* Hero Section */ +.hero { + background: linear-gradient(135deg, rgba(122, 162, 247, 0.1) 0%, rgba(187, 154, 247, 0.1) 100%); + border: 1px solid rgba(65, 72, 104, 0.5); + border-radius: 0.75rem; + padding: 3rem 2rem; + margin: 2rem 0; + text-align: center; +} + +.hero-title { + font-size: 3rem; + font-weight: 700; + letter-spacing: -0.025em; + margin-bottom: 1rem; + background: linear-gradient(135deg, #7aa2f7 0%, #bb9af7 100%); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; +} + +.hero-description { + font-size: 1.25rem; + color: #9aa5ce; + margin-bottom: 2rem; + max-width: 42rem; + margin-left: auto; + margin-right: auto; } -.md-tabs__link:active { - color: #fff; +.hero-actions { + display: flex; + gap: 1rem; + justify-content: center; + flex-wrap: wrap; +} + +/* Modern Buttons */ +.btn { + display: inline-flex; + align-items: center; + justify-content: center; + padding: 0.75rem 1.5rem; + border-radius: 0.5rem; + font-weight: 500; + text-decoration: none; + transition: all 0.15s ease; + border: 1px solid transparent; +} + +.btn-primary { + background: #7aa2f7; + color: #1a1b26; + border-color: #7aa2f7; +} + +.btn-primary:hover { + background: #6d8fd6; + border-color: #6d8fd6; + color: #1a1b26; + transform: translateY(-1px); +} + +.btn-secondary { + background: transparent; + color: #a9b1d6; + border-color: rgba(65, 72, 104, 0.5); +} + +.btn-secondary:hover { + background: rgba(65, 72, 104, 0.5); + color: #c0caf5; + transform: translateY(-1px); +} + +/* Feature Grid */ +.feature-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); + gap: 
1.5rem;
+  margin: 2rem 0;
+}
+
+.feature-card {
+  background: rgba(36, 40, 59, 0.5);
+  border: 1px solid rgba(65, 72, 104, 0.5);
+  border-radius: 0.75rem;
+  padding: 1.5rem;
+  transition: all 0.15s ease;
+}
+
+.feature-card:hover {
+  border-color: rgba(122, 162, 247, 0.3);
+  transform: translateY(-2px);
+}
+
+.feature-icon {
+  font-size: 2rem;
+  margin-bottom: 0.5rem;
+}
+
+.feature-card h3 {
+  font-size: 1.25rem;
+  font-weight: 600;
+  margin-bottom: 0.5rem;
+  color: #c0caf5;
+}
+
+.feature-card p {
+  color: #9aa5ce;
+  line-height: 1.6;
+}
+
+/* Navigation Grid */
+.nav-grid {
+  display: grid;
+  grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
+  gap: 2rem;
+  margin: 2rem 0;
+}
+
+.nav-section {
+  background: rgba(23, 23, 23, 0.3);
+  border: 1px solid rgba(39, 39, 42, 0.5);
+  border-radius: 0.5rem;
+  padding: 1.5rem;
+}
+
+.nav-section h3 {
+  font-size: 1.125rem;
+  font-weight: 600;
+  margin-bottom: 1rem;
+  color: #fafafa;
+}
+
+.nav-section ul {
+  list-style: none;
+  padding: 0;
+  margin: 0;
+}
+
+.nav-section li {
+  margin-bottom: 0.75rem;
+}
+
+.nav-section a {
+  color: #0ea5e9;
+  text-decoration: none;
+  font-weight: 500;
+  transition: color 0.15s ease;
+}
+
+.nav-section a:hover {
+  color: #0284c7;
 }

 [data-md-color-scheme="custom"] {
-  /* Tokyo Night Theme */
   color-scheme: dark;
-  /* Main Colors */
-  --md-hue: 240;
-  /* Base hue guess for HSL fallbacks if needed, adjust as required */
-  --md-primary-fg-color: #7aa2f7;
-  /* Blue */
-  --md-accent-fg-color: #bb9af7;
-  /* Magenta */
-
-  /* Default colors */
-  --md-default-fg-color: #a9b1d6;
-  /* Editor Foreground */
-  --md-default-fg-color--light: #565f89;
-  /* Comments */
-  --md-default-fg-color--lighter: rgba(169, 177, 214, 0.32);
-  /* Lighter version of default fg */
-  --md-default-fg-color--lightest: rgba(169, 177, 214, 0.12);
-  /* Lightest version of default fg */
-  --md-default-bg-color: #11111B;
-  /* Editor Background (Night) */
-  --md-default-bg-color--light: rgba(26, 27, 38, 0.7);
-  /* Lighter version of default bg */
-  --md-default-bg-color--lighter: rgba(26, 27, 38, 0.4);
-  /* Lighter version of default bg */
-  --md-default-bg-color--lightest: rgba(26, 27, 38, 0.1);
-  /* Lightest version of default bg */
-
-  /* Code colors */
+  /* Tokyo Night color system */
+  --md-primary-fg-color: #c0caf5;
+  --md-accent-fg-color: #7aa2f7;
+
+  /* Background - Tokyo Night */
+  --md-default-bg-color: #1a1b26;
+  --md-default-bg-color--light: #24283b;
+  --md-default-bg-color--lighter: #414868;
+  --md-default-bg-color--lightest: #565f89;
+
+  /* Foreground - Tokyo Night */
+  --md-default-fg-color: #c0caf5;
+  --md-default-fg-color--light: #a9b1d6;
+  --md-default-fg-color--lighter: #9aa5ce;
+  --md-default-fg-color--lightest: #565f89;
+
+  /* Code - Tokyo Night */
   --md-code-fg-color: #c0caf5;
-  /* Variables, Class names */
-  --md-code-bg-color: #1a1b26;
-  /* Using main background */
-
-  /* Code highlighting */
-  --md-code-hl-color: rgba(187, 154, 247, 0.15);
-  /* Accent (Magenta) with alpha */
-  --md-code-hl-color--light: rgba(187, 154, 247, 0.1);
-  /* Accent (Magenta) with less alpha */
+  --md-code-bg-color: #24283b;
+  --md-code-hl-color: rgba(122, 162, 247, 0.15);
+  --md-code-hl-color--light: rgba(122, 162, 247, 0.1);
+
+  /* Syntax - Tokyo Night */
   --md-code-hl-number-color: #ff9e64;
-  /* Number constants */
   --md-code-hl-special-color: #f7768e;
-  /* Regex group symbol, CSS units */
   --md-code-hl-function-color: #7aa2f7;
-  /* Function names */
   --md-code-hl-constant-color: #ff9e64;
-  /* Language support constants */
   --md-code-hl-keyword-color: #bb9af7;
-  /* Control Keywords, Storage Types */
   --md-code-hl-string-color: #9ece6a;
-  /* Strings */
-  --md-code-hl-name-color: var(--md-code-fg-color);
-  /* Default code foreground */
-  --md-code-hl-operator-color: #bb9af7;
-  /* Regex symbols and operators */
-  --md-code-hl-punctuation-color: #7dcfff;
-  /* Object properties, punctuation */
+  --md-code-hl-operator-color: #89ddff;
+  --md-code-hl-punctuation-color: #89ddff;
   --md-code-hl-comment-color: #565f89;
-  /* Comments */
-  --md-code-hl-generic-color: var(--md-default-fg-color--light);
   --md-code-hl-variable-color: #c0caf5;
-  /* Variables */
-  /* Typeset colors */
+  /* Typography */
   --md-typeset-color: var(--md-default-fg-color);
-  --md-typeset-a-color: var(--md-primary-fg-color);
-  --md-typeset-kbd-color: #414868;
-  /* Terminal Black */
-  --md-typeset-kbd-accent-color: #565f89;
-  /* Comments */
-  --md-typeset-kbd-border-color: #24283b;
-  /* Editor Background (Storm) - slightly lighter */
-  --md-typeset-mark-color: rgba(187, 154, 247, 0.3);
-  /* Accent (Magenta) with alpha */
-  --md-typeset-table-color: rgba(169, 177, 214, 0.12);
-  /* Default FG lightest */
-  --md-typeset-table-color--light: rgba(169, 177, 214, 0.035);
-  /* Even lighter */
-
-  /* Admonition colors */
-  --md-admonition-fg-color: var(--md-default-fg-color);
-  --md-admonition-bg-color: rgba(41, 46, 66, 0.5);
-  /* #292e42 with alpha */
-
-  /* Footer colors */
+  --md-typeset-a-color: #7aa2f7;
+
+  /* Cards - Tokyo Night */
+  --md-admonition-bg-color: rgba(36, 40, 59, 0.8);
+  --md-typeset-table-color: rgba(65, 72, 104, 0.5);
+  --md-typeset-table-color--light: rgba(65, 72, 104, 0.3);
+
+  /* Footer */
+  --md-footer-bg-color: #24283b;
+  --md-footer-bg-color--dark: #1a1b26;
   --md-footer-fg-color: var(--md-default-fg-color--light);
-  --md-footer-fg-color--lighter: var(--md-default-fg-color--lighter);
-  --md-footer-bg-color: #16161e;
-  /* Slightly darker than main background */
-  --md-footer-bg-color--dark: #101014;
-  /* Even darker */
-
-  /* Shadows (copied from slate, generally okay for dark themes) */
-  --md-shadow-z1:
-    0 0.25rem 0.625rem hsla(0, 0%, 0%, 0.05),
-    0 0 0.0625rem hsla(0, 0%, 0%, 0.1);
-  --md-shadow-z2:
-    0 0.25rem 0.625rem hsla(0, 0%, 0%, 0.25),
-    0 0 0.0625rem hsla(0, 0%, 0%, 0.25);
-  --md-shadow-z3:
-    0 0.25rem 0.625rem hsla(0, 0%, 0%, 0.4),
-    0 0 0.0625rem hsla(0, 0%, 0%, 0.35);
-
-  /* Hide light-mode-only images */
-  img[src$="#only-light"],
-  img[src$="#gh-light-mode-only"] {
-    display: none;
-  }
+
+  /* Tokyo Night shadows */
+  --md-shadow-z1: 0 1px 2px 0 rgba(0, 0, 0, 0.3);
+  --md-shadow-z2: 0 1px 3px 0 rgba(0, 0, 0, 0.4), 0 1px 2px -1px rgba(0, 0, 0, 0.4);
+  --md-shadow-z3: 0 4px 6px -1px rgba(0, 0, 0, 0.4), 0 2px 4px -2px rgba(0, 0, 0, 0.4);
+}
+
+/* shadcn-inspired component styling */
+.md-typeset h1,
+.md-typeset h2,
+.md-typeset h3,
+.md-typeset h4 {
+  font-weight: 600;
+  letter-spacing: -0.025em;
+  margin-top: 1.5rem;
+  margin-bottom: 0.5rem;
+}
+
+.md-typeset h1 {
+  font-size: 1.75rem;
+  line-height: 2rem;
+  margin-top: 0;
+}
+
+.md-typeset h2 {
+  font-size: 1.5rem;
+  line-height: 2rem;
+  margin-top: 2rem;
+}
+
+.md-typeset h3 {
+  font-size: 1.125rem;
+  line-height: 1.5rem;
+}
+
+.md-typeset h4 {
+  font-size: 1rem;
+  line-height: 1.375rem;
+}
+
+/* Modern button-like elements */
+.md-nav__link:hover,
+.md-tabs__link:hover {
+  background-color: rgba(39, 39, 42, 0.5);
+  border-radius: 0.375rem;
+  transition: all 0.15s ease;
+}
+
+/* Card-like admonitions */
+.md-typeset .admonition {
+  border: 1px solid rgba(39, 39, 42, 0.5);
+  border-radius: 0.5rem;
+  background: rgba(23, 23, 23, 0.5);
+  backdrop-filter: blur(8px);
+}
+
+/* Modern code blocks */
+.md-typeset .highlight {
+  border-radius: 0.5rem;
+  border: 1px solid rgba(65, 72, 104, 0.3);
+  overflow: hidden;
+  background: #24283b;
+}
+
+.md-typeset .highlight .filename {
+  background: rgba(65, 72, 104, 0.4);
+  color: #c0caf5;
+  padding: 0.5rem 1rem;
+  font-size: 0.75rem;
+  font-weight: 500;
+  border-bottom: 1px solid rgba(65, 72, 104, 0.3);
+  margin: 0;
+}
+
+.md-typeset .highlight pre {
+  background: #24283b !important;
+  border: none;
+  border-radius: 0;
+  margin: 0;
+  padding: 1rem;
+}
+
+.md-typeset pre {
+  background: #24283b !important;
+  border: 1px solid rgba(65, 72, 104, 0.3);
+  border-radius: 0.5rem;
+  padding: 1rem;
+  overflow-x: auto;
+}
+
+.md-typeset pre code {
+  background: transparent !important;
+  color: #c0caf5 !important;
+  padding: 0;
+  border: none;
+  border-radius: 0;
+  font-size: inherit;
+}
+
+.md-typeset :not(pre) > code {
+  background: rgba(65, 72, 104, 0.3) !important;
+  color: #c0caf5 !important;
+  border: 1px solid rgba(65, 72, 104, 0.2);
+  border-radius: 0.25rem;
+  padding: 0.125rem 0.375rem;
+  font-size: 0.875em;
+}
+
+/* mkdocstrings documentation styling */
+article .doc code {
+  background: transparent;
+  padding: 0;
+}
+
+article .doc details {
+  margin-top: 0;
+}
+
+article .doc .doc-children {
+  display: flex;
+  flex-direction: column;
+  gap: 1rem;
+}
+
+article .doc details + .doc-children {
+  margin-top: 1rem;
+}
+
+article .doc .doc-contents {
+  margin-top: 0.5rem;
+  display: flex;
+  flex-direction: column;
+  gap: 0.5rem;
+}
+
+article .doc .doc-contents:empty {
+  margin-top: 0;
+}
+
+article .doc .doc-contents.first {
+  margin-top: 1.5rem;
+}
+
+article .typography > div:first-child > h2 {
+  margin-top: 0;
+}
+
+article .doc .doc-contents p {
+  color: #9aa5ce;
+  font-size: 0.875rem;
+  margin-top: 0;
+}
+
+article .doc .doc-contents h3 {
+  font-size: 1.125rem;
+}
+
+article .doc .doc-contents h3 code {
+  font-size: 1rem;
+}
+
+article .doc .doc-contents code {
+  border-radius: 0;
+}
+
+article .doc .doc-contents ul {
+  margin-top: 0;
+}
+
+article .doc .doc-contents details {
+  padding: 0.5rem 0;
+}
+
+article .doc .doc-contents table:not(.codehilitetable) {
+  font-size: 0.875rem;
+  margin-top: 0.5rem;
+  width: 100%;
+}
+
+article .doc .doc-contents table th {
+  font-weight: 500;
+}
+
+article .doc .doc-contents .doc-class-bases {
+  color: #c0caf5;
+}
+
+article .doc .doc-contents .doc-section-title {
+  color: #c0caf5;
+  font-weight: 600;
+}
+
+article .doc .doc-object {
+  padding: 1.25rem;
+  background: rgba(36, 40, 59, 0.5);
+  border-radius: 0.5rem;
+  border: 1px solid rgba(65, 72, 104, 0.3);
+}
+
+article .doc .doc-object.doc-function {
+  background: rgba(41, 37, 36, 0.5);
+}
+
+article .doc .doc-object h2,
+article .doc .doc-object h3,
+article .doc .doc-object h4 {
+  margin-top: 0;
+  display: flex;
+  flex-direction: row;
+  align-items: center;
+  justify-content: space-between;
+}
+
+article .doc .doc-object .doc-labels {
+  display: flex;
+  flex-direction: row;
+  gap: 0.5rem;
+}
+
+article .doc .doc-object .doc-contents {
+  color: #9aa5ce;
+  font-size: 0.875rem;
+}
+
+article .doc .doc-object .doc-contents ul > li {
+  margin-top: 0.25rem;
+}
+
+article .doc .doc-object code {
+  margin-top: 0;
+}
+
+article .doc small code {
+  font-weight: 400;
+  color: #9aa5ce;
+}
+
+article .doc .doc-class-bases code {
+  font-weight: 500;
 }

-/* Mark external links as such (also in nav) */
+/* External link styling */
 a.external:hover::after,
 a.md-nav__link[href^="https:"]:hover::after {
-  /* https://primer.style/octicons/link-external-16 */
-  background-image: url('data:image/svg+xml,');
+  background-image: url('data:image/svg+xml,');
   height: 0.8em;
   width: 0.8em;
   margin-left: 0.2em;
diff --git a/docs/content/community/contributing.md b/docs/content/community/contributing.md
new file mode 100644
index 000000000..c38c2bdb5
--- /dev/null
+++ b/docs/content/community/contributing.md
@@ -0,0 +1,572 @@
+# Contributing to Tux
+
+Thank you for your interest in contributing to Tux! This guide covers everything you need to know to
+contribute effectively.
+
+## Getting Started
+
+### Ways to Contribute
+
+**Code Contributions:**
+
+- Bug fixes
+- New features
+- Performance improvements
+- Code refactoring
+- Test improvements
+
+**Documentation:**
+
+- Fix typos and errors
+- Improve existing documentation
+- Add missing documentation
+- Create tutorials and guides
+
+**Community Support:**
+
+- Help users in Discord
+- Answer questions on GitHub
+- Report bugs
+- Test new features
+
+**Design & UX:**
+
+- UI/UX improvements
+- Bot response design
+- Documentation design
+- Asset creation
+
+### Before You Start
+
+1. **Read the Code of Conduct** - Be respectful and inclusive
+2. **Check existing issues** - Avoid duplicate work
+3. **Join our Discord** - Get help and discuss ideas
+4. **Set up development environment** - Follow the development setup guide
+
+## Development Process
+
+### 1. Fork and Clone
+
+```bash
+# Fork the repository on GitHub
+# Then clone your fork
+git clone https://github.com/YOUR_USERNAME/tux.git
+cd tux
+
+# Add upstream remote
+git remote add upstream https://github.com/allthingslinux/tux.git
+```
+
+### 2. Create Feature Branch
+
+```bash
+# Create and switch to feature branch
+git checkout -b feature/your-feature-name
+
+# Branch naming conventions:
+# feature/description - New features
+# fix/description - Bug fixes
+# docs/description - Documentation updates
+# refactor/description - Code refactoring
+```
+
+### 3. Set Up Development Environment
+
+```bash
+# Install dependencies
+uv sync
+
+# Set up pre-commit hooks
+uv run dev pre-commit install
+
+# Configure environment
+cp .env.example .env
+# Edit .env with your test bot token and database
+
+# Set up database
+createdb tux_dev
+uv run db migrate-push
+```
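+
+A development `.env` typically needs at least a bot token and a database URL. The variable names
+below are illustrative assumptions; `.env.example` in the repository is the authoritative reference:
+
+```bash
+# Illustrative values only -- copy the real keys from .env.example
+BOT_TOKEN=your-test-bot-token
+DATABASE_URL=postgresql://tux:password@localhost:5432/tux_dev
+```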
+
+### 4. Make Changes
+
+**Code Quality Standards:**
+
+- Follow existing code style
+- Add type hints to all functions
+- Write docstrings for public functions
+- Add tests for new functionality
+- Update documentation as needed
+
+**Commit Message Format:**
+
+```text
+type(scope): description
+
+Examples:
+feat(moderation): add timeout command
+fix(database): resolve connection pool issue
+docs(api): update database documentation
+refactor(core): simplify permission system
+test(moderation): add ban command tests
+```
+
+### 5. Test Your Changes
+
+```bash
+# Run all quality checks
+uv run dev all
+
+# Run tests
+uv run test run
+
+# Test manually with your bot
+uv run tux start --debug
+```
+
+### 6. Submit Pull Request
+
+```bash
+# Push to your fork
+git push origin feature/your-feature-name
+
+# Create pull request on GitHub
+# Fill out the PR template completely
+```
+
+## Code Guidelines
+
+### Python Style
+
+**Follow PEP 8:**
+
+- Use 4 spaces for indentation
+- Line length limit of 88 characters (the project formatter's default, relaxed from PEP 8's 79)
+- Use snake_case for functions and variables
+- Use PascalCase for classes
+- Use UPPER_CASE for constants
+
+**Type Hints:**
+
+```python
+# Always use type hints
+async def create_case(
+    self,
+    case_type: str,
+    user_id: int,
+    reason: str | None = None
+) -> Case:
+    """Create a new moderation case."""
+    pass
+```
+
+**Docstrings:**
+
+```python
+async def ban_user(self, user_id: int, reason: str) -> Case:
+    """Ban a user from the server.
+
+    Args:
+        user_id: Discord user ID to ban
+        reason: Reason for the ban
+
+    Returns:
+        Created case instance
+
+    Raises:
+        PermissionError: If bot lacks ban permissions
+        ValueError: If user_id is invalid
+    """
+```
+
+**Error Handling:**
+
+```python
+# Be specific with exception handling
+try:
+    result = await risky_operation()
+except SpecificError as e:
+    logger.warning(f"Expected error: {e}")
+    return None
+except Exception as e:
+    logger.error(f"Unexpected error: {e}", exc_info=True)
+    raise
+```
+
+### Discord.py Best Practices
+
+**Command Structure:**
+
+```python
+@commands.hybrid_command()
+@has_permission("moderator")
+async def example(self, ctx: TuxContext, user: discord.Member, *, reason: str | None = None):
+    """Example command with proper structure."""
+    try:
+        # Validate input
+        if user == ctx.author:
+            await ctx.send("You cannot target yourself.")
+            return
+
+        # Perform action
+        result = await self.perform_action(user, reason)
+
+        # Send response
+        embed = discord.Embed(
+            title="Action Completed",
+            description=f"Successfully performed action on {user.mention}",
+            color=discord.Color.green()
+        )
+        await ctx.send(embed=embed)
+
+    except Exception:
+        # Error handling is done by the global error handler
+        raise
+```
+
+**Database Operations:**
+
+```python
+# Use the database coordinator
+async def create_case_example(self, user_id: int, guild_id: int):
+    case = await self.db.case.create_case(
+        case_type="BAN",
+        case_user_id=user_id,
+        case_moderator_id=self.bot.user.id,
+        guild_id=guild_id,
+        case_reason="Example ban"
+    )
+    return case
+```
+
+### Testing Guidelines
+
+**Test Structure:**
+
+```python
+import pytest
+from unittest.mock import MagicMock
+
+@pytest.mark.asyncio
+async def test_ban_command(mock_bot, mock_ctx, mock_db):
+    """Test ban command functionality."""
+    # Arrange
+    cog = ModerationCog(mock_bot)
+    user = MagicMock()
+    user.id = 123456789
+
+    # Act
+    await cog.ban(mock_ctx, user, reason="Test ban")
+
+    # Assert
+    mock_db.case.create_case.assert_called_once()
+    mock_ctx.send.assert_called_once()
+```
+
+**Test Categories:**
+
+- Unit tests for individual functions
+- Integration tests for command workflows
+- Database tests for data operations
+- Mock tests for external dependencies (see the sketch below)
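+
+A minimal sketch of that last category, using only `unittest.mock` (the `fetch_user` client below is
+hypothetical, standing in for any external API wrapper):
+
+```python
+import pytest
+from unittest.mock import AsyncMock
+
+@pytest.mark.asyncio
+async def test_external_api_timeout_is_surfaced():
+    """Simulate an external API failure without touching the network."""
+    client = AsyncMock()  # stands in for a real API wrapper
+    client.fetch_user.side_effect = TimeoutError("upstream timed out")
+
+    with pytest.raises(TimeoutError):
+        await client.fetch_user(123456789)
+
+    client.fetch_user.assert_awaited_once_with(123456789)
+```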
"""Example with clear purpose and usage.""" + user_id = 123456789 # Discord user ID + case = await db.case.create_case( + case_type="WARN", + case_user_id=user_id, + case_reason="Example warning" + ) + return case +``` + +### Documentation Types + +**API Documentation:** + +- Use docstrings for all public functions +- Include parameter types and descriptions +- Document return values and exceptions +- Provide usage examples + +**User Documentation:** + +- Focus on practical usage +- Include screenshots when helpful +- Provide troubleshooting tips +- Keep up-to-date with features + +**Developer Documentation:** + +- Explain architecture decisions +- Document development workflows +- Include setup instructions +- Provide debugging guides + +## Issue Guidelines + +### Bug Reports + +**Use the bug report template:** + +- Clear description of the issue +- Steps to reproduce +- Expected vs actual behavior +- Environment information +- Relevant logs or screenshots + +**Good Bug Report:** + +```text +**Bug Description:** +The ban command fails when trying to ban a user with a very long username. + +**Steps to Reproduce:** +1. Use `/ban @user_with_very_long_username spam` +2. Bot responds with "An error occurred" + +**Expected Behavior:** +User should be banned and case created + +**Actual Behavior:** +Command fails with database error + +**Environment:** +- Tux version: v1.2.3 +- Python version: 3.13.0 +- Database: PostgreSQL 15 + +**Logs:** +``` + +ERROR: value too long for type character varying(50) + +```text +``` + +### Feature Requests + +**Use the feature request template:** + +- Clear description of the feature +- Use cases and benefits +- Possible implementation approach +- Alternative solutions considered + +**Good Feature Request:** + +```text +**Feature Description:** +Add ability to set temporary bans with automatic unbanning + +**Use Case:** +Moderators want to ban users for specific time periods (1 day, 1 week, etc.) +without manually tracking when to unban them. 
+ +**Proposed Solution:** +Add duration parameter to ban command: +`/ban @user spam --duration 7d` + +**Benefits:** +- Reduces moderator workload +- Ensures consistent enforcement +- Prevents forgotten unbans +``` + +## Pull Request Guidelines + +### PR Requirements + +**Before Submitting:** + +- [ ] All tests pass +- [ ] Code follows style guidelines +- [ ] Documentation updated +- [ ] No merge conflicts +- [ ] Feature tested manually + +**PR Description:** + +- Clear title describing the change +- Detailed description of what changed +- Link to related issues +- Screenshots for UI changes +- Breaking changes noted + +**Good PR Example:** + +```text +## Add timeout command for moderation + +### Changes +- Added new `/timeout` command to moderation module +- Implemented database support for timeout cases +- Added tests for timeout functionality +- Updated documentation + +### Related Issues +Closes #123 + +### Testing +- [x] Unit tests pass +- [x] Integration tests pass +- [x] Manually tested in development server +- [x] Tested edge cases (invalid duration, missing permissions) + +### Screenshots +[Include screenshots of command in action] + +### Breaking Changes +None +``` + +### Review Process + +**What Reviewers Look For:** + +- Code quality and style +- Test coverage +- Documentation completeness +- Performance implications +- Security considerations + +**Addressing Feedback:** + +- Respond to all review comments +- Make requested changes promptly +- Ask questions if feedback is unclear +- Update PR description if scope changes + +## Community Guidelines + +### Code of Conduct + +**Be Respectful:** + +- Treat everyone with respect +- Be inclusive and welcoming +- Avoid discriminatory language +- Respect different opinions + +**Be Constructive:** + +- Provide helpful feedback +- Focus on the code, not the person +- Suggest improvements +- Help others learn + +**Be Professional:** + +- Keep discussions on-topic +- Avoid personal attacks +- Use appropriate language +- Maintain confidentiality when needed + +### Communication Channels + +**Discord Server:** + +- General discussion +- Getting help +- Feature discussions +- Community support + +**GitHub Issues:** + +- Bug reports +- Feature requests +- Technical discussions +- Project planning + +**GitHub Discussions:** + +- Long-form discussions +- Ideas and proposals +- Q&A +- Show and tell + +### Recognition + +**Contributors are recognized through:** + +- GitHub contributor list +- Discord contributor role +- Mention in release notes +- Special thanks in documentation + +**Types of Contributions Recognized:** + +- Code contributions +- Documentation improvements +- Bug reports and testing +- Community support +- Design and UX work + +## Getting Help + +### Resources + +**Documentation:** + +- Developer setup guide +- API documentation +- Architecture overview +- Troubleshooting guides + +**Community:** + +- Discord server for real-time help +- GitHub discussions for detailed questions +- Stack Overflow for general Python/Discord.py questions + +**Mentorship:** + +- New contributors can request mentorship +- Experienced contributors help review PRs +- Pair programming sessions available +- Code review feedback and guidance + +### Common Questions + +**Q: How do I get started contributing?** +A: Start with the "good first issue" label on GitHub, set up your development environment, and join +our Discord for help. + +**Q: What should I work on?** +A: Check the issues labeled "help wanted" or "good first issue". 
You can also propose new features
+or improvements.
+
+**Q: How long do PR reviews take?**
+A: We aim to review PRs within 48-72 hours. Complex PRs may take longer.
+
+**Q: Can I work on multiple issues at once?**
+A: It's better to focus on one issue at a time, especially when starting out.
+
+Thank you for contributing to Tux! Your contributions help make the bot better for everyone.
diff --git a/docs/content/community/faq.md b/docs/content/community/faq.md
new file mode 100644
index 000000000..cd2bedcee
--- /dev/null
+++ b/docs/content/community/faq.md
@@ -0,0 +1,328 @@
+# Frequently Asked Questions
+
+Common questions and answers about Tux.
+
+## General Questions
+
+### What is Tux?
+
+Tux is an all-in-one Discord bot designed for the All Things Linux Discord server, but available for
+any server. It provides moderation tools, utility commands, fun features, and more.
+
+### Is Tux free to use?
+
+Yes! Tux is completely free and open source. You can invite it to your server or self-host your own
+instance.
+
+### How do I invite Tux to my server?
+
+Use the official invite link from our website or GitHub repository. You'll need Administrator
+permissions in your Discord server.
+
+### What permissions does Tux need?
+
+**Basic functionality:**
+
+- Read Messages/View Channels
+- Send Messages
+- Embed Links
+- Read Message History
+
+**Moderation features:**
+
+- Kick Members
+- Ban Members
+- Manage Messages
+- Moderate Members (for timeouts)
+- Manage Roles (for jail system)
+
+### Can I use both slash commands and prefix commands?
+
+Yes! Tux supports hybrid commands. Most commands work with both `/command` (slash) and `!command`
+(prefix) formats.
+
+## Setup and Configuration
+
+### How do I change the command prefix?
+
+Use `/config prefix set <prefix>`. For example: `/config prefix set ?`
+
+### How do I set up moderation logging?
+
+Use `/config logs set Public <channel>` to configure where moderation actions are logged.
+
+### How do I configure the permission system?
+
+Use `!permission assign <user> <level>` to set permission levels. Available levels are configured by
+server administrators.
+
+### How do I set up the jail system?
+
+1. Create a jail role with restricted permissions
+2. Create a jail channel
+3. Configure through server admin commands
+
+### How do I enable the starboard?
+
+Starboard is automatically enabled when messages receive enough ⭐ reactions.
+
+## Commands and Features
+
+### How do I see all available commands?
+
+Use `/help` or `!help` to see all commands. Use `/help <command>` for specific command help.
+
+### Why can't I use certain commands?
+
+Commands may be restricted by:
+
+- Permission level requirements
+- Role-based assignments
+- Channel restrictions
+- Bot permissions
+
+Check with server administrators about your permission level.
+
+### How do I create and use snippets?
+
+```text
+!createsnippet <name> <content>  # Create snippet
+!<name>                          # Use snippet
+!listsnippets                    # List all snippets
+!deletesnippet <name>            # Delete snippet
+```
+
+### How does the leveling system work?
+
+Users gain XP by participating in chat. Use `/level` to check your level.
+
+### How do I set reminders?
+
+Use `/remindme <time> <reminder>` to set a reminder.
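+
+For example (the exact duration format here is an assumption; check `/help remindme` on your server):
+
+```text
+/remindme 2h Check on the backup job
+```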
+
+Modern Discord Bot for Linux Communities
+
+Tux is a powerful, feature-rich Discord bot built with Python 3.13+ and designed specifically for the All Things Linux community. Get started in minutes with our comprehensive documentation.