Skip to content

Removes Flake8 and Black configurations, replaces with Ruff for linting #107

Removes Flake8 and Black configurations, replaces with Ruff for linting

Removes Flake8 and Black configurations, replaces with Ruff for linting #107

Workflow file for this run

# This workflow integrates multiple Python security and quality analysis tools with SonarQube.
# It runs various static code analysis tools and security scanners in parallel, then aggregates
# their results into SonarQube for unified reporting.
#
# Tools included:
# - Bandit: Security-focused static analyser
# - Ruff: Fast Python linter
# - MyPy: Static type checker
# - Flake8: Style guide enforcer, but not enabled, because it has poor compatibility with pyproject.toml
# - Pylint: Static code analyser
# - CodeQL: Semantic code analysis
# - Snyk: Dependency and security scanner
#
# Known Limitations:
# - SonarQube currently doesn't support Pyright report format natively
# - SBOM (Software Bill of Materials) imports are currently not supported in SonarQube Cloud
# - SonarQube Cloud doesn't allow downloading SARIF reports. If we find a way to do this, we can upload them to CodeQL.
#
# The workflow runs weekly and on all PR/push events to main branch.
name: 🛡️ Bastion Quality Gates

# Triggers: every push/PR to main, a weekly scheduled sweep, and manual dispatch.
on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]
  schedule:
    - cron: '0 0 * * 0' # Weekly on Sunday
  workflow_dispatch:

# Least-privilege token: security-events write is required for CodeQL SARIF upload;
# everything else stays read-only.
permissions:
  pull-requests: read
  security-events: write
  contents: read
jobs:
  # Reusable test workflow; analysis only runs after tests pass.
  Python_Tests:
    uses: ./.github/workflows/python-tests.yml

  # One matrix leg per analysis tool; legs run in parallel and each uploads
  # its report (plus a SHA3-256 checksum) as an artifact for the SonarQube job.
  # This workflow now depends on the successful completion of the python-tests workflow.
  Analyze:
    needs: Python_Tests
    runs-on: ubuntu-latest
    strategy:
      matrix:
        tool: [bandit, ruff, mypy, pylint, codeql, snyk]
    steps:
      - name: Checkout code
        uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2

      - name: Set up Python
        if: matrix.tool != 'codeql' # CodeQL has its own Python setup
        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55
        with:
          python-version: '3.13'

      - name: Cache pip dependencies
        if: matrix.tool != 'codeql'
        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684
        with:
          path: |
            ~/.cache/pip
            ~/.cache/pypoetry
            /usr/local/lib/python3.13/site-packages
          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock', 'pyproject.toml') }}
          restore-keys: |
            ${{ runner.os }}-poetry-

      # Consolidated Poetry installation - used by multiple steps
      - name: Install Poetry
        if: matrix.tool != 'codeql'
        run: |
          pip install poetry
          if [ "${{ matrix.tool }}" = "snyk" ]; then
            pip install poetry-plugin-export
          fi
          poetry --version

      - name: Install project dependencies
        if: matrix.tool != 'codeql'
        run: |
          # Only install the dependencies needed for each specific tool
          case "${{ matrix.tool }}" in
            snyk)
              # Snyk only needs dependency export
              ;;
            *)
              # Analysis tools need dev dependencies
              poetry install --with dev --no-interaction
              ;;
          esac

      # Optimized tool installation - only install the tool being used.
      # NOTE(review): the former flake8 branch was removed as dead code —
      # 'flake8' is not in the matrix (deliberately disabled, see header comment).
      - name: Install analysis tools
        if: matrix.tool != 'codeql' && matrix.tool != 'snyk'
        run: |
          case "${{ matrix.tool }}" in
            bandit)
              poetry run pip install bandit
              poetry run bandit --version
              ;;
            ruff)
              poetry run pip install ruff
              poetry run ruff --version
              ;;
            mypy)
              poetry run pip install mypy types-setuptools types-requests
              poetry run mypy --version
              ;;
            pylint)
              poetry run pip install pylint
              poetry run pylint --version
              ;;
            *)
              echo "Unknown tool: ${{ matrix.tool }}"
              exit 1
              ;;
          esac

      - name: Setup Snyk CLI
        if: matrix.tool == 'snyk'
        uses: snyk/actions/setup@cdb760004ba9ea4d525f2e043745dfe85bb9077e
        with:
          snyk-version: latest

      - name: Run Snyk Security Scan
        if: matrix.tool == 'snyk'
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_SECRET_TOKEN }}
          DEBUG: snyk*
        run: |
          # Export dependencies from Poetry to requirements.txt
          poetry export --format requirements.txt --output requirements.txt --with dev
          # Run Snyk test with the generated requirements file; retry with
          # --all-projects if the single-file scan fails.
          snyk test --file=requirements.txt --sarif-file-output=snyk_report.sarif --skip-unresolved || snyk test --file=requirements.txt --sarif-file-output=snyk_report.sarif --skip-unresolved --all-projects

      # Tool-specific analysis steps. '|| true' keeps the leg alive when the
      # tool reports findings, so the report is still hashed and uploaded.
      - name: Run Bandit
        if: matrix.tool == 'bandit'
        run: poetry run bandit -r . -o bandit_report.json --format json --exclude tests,.git || true

      - name: Run Ruff
        if: matrix.tool == 'ruff'
        run: poetry run ruff check . --output-format json --output-file ruff_report.json --exclude tests,.git || true

      - name: Run Mypy
        if: matrix.tool == 'mypy'
        run: poetry run mypy . 2>&1 | tee mypy_report.txt || true

      - name: Run Pylint
        if: matrix.tool == 'pylint'
        run: poetry run pylint --recursive=y . --output-format=json > pylint_report.json || true

      - name: Initialize CodeQL
        if: matrix.tool == 'codeql'
        uses: github/codeql-action/init@486ab5a2922b634015408a83e10f6867efb5922c
        with:
          languages: python

      - name: Perform CodeQL Analysis
        if: matrix.tool == 'codeql'
        uses: github/codeql-action/analyze@486ab5a2922b634015408a83e10f6867efb5922c
        with:
          output: codeql_report.sarif

      # Generate SHA3-256 hashes for all report files so the SonarQube job can
      # detect artifact corruption or tampering before importing them.
      - name: Generate SHA3-256 hash for report files
        run: |
          python -c "
          import hashlib, sys, os, glob
          # Define report file patterns based on the tool
          if '${{ matrix.tool }}' == 'codeql':
              # CodeQL outputs to a directory with python.sarif inside
              if os.path.isdir('codeql_report.sarif') and os.path.exists('codeql_report.sarif/python.sarif'):
                  report_files = ['codeql_report.sarif/python.sarif']
              else:
                  # Fallback if structure is different
                  sarif_files = glob.glob('codeql_report.sarif/**/*.sarif', recursive=True)
                  if sarif_files:
                      report_files = sarif_files
                  else:
                      print('Warning: No SARIF files found in codeql_report.sarif directory')
                      report_files = []
          elif '${{ matrix.tool }}' == 'snyk':
              report_files = ['snyk_report.sarif']
          else:
              # Handle standard tool reports
              extensions = {'bandit': 'json', 'ruff': 'json', 'pylint': 'json', 'mypy': 'txt'}
              extension = extensions.get('${{ matrix.tool }}', 'txt')
              report_files = ['${{ matrix.tool }}_report.' + extension]
          # Generate hash for each report file
          for report_file in report_files:
              if os.path.exists(report_file):
                  with open(report_file, 'rb') as f:
                      content = f.read()
                  hash_obj = hashlib.sha3_256(content)
                  with open(report_file + '.sha3', 'w') as hf:
                      hf.write(hash_obj.hexdigest())
                  print(f'Generated hash for {report_file}')
              else:
                  print(f'Warning: Report file {report_file} not found')
          "

      - name: Upload report artifact
        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
        with:
          name: ${{ matrix.tool }}-report
          path: |
            ${{ matrix.tool }}_report.*
            codeql_report.sarif/**/*.sarif*
            codeql_report.sarif/**/*.sarif.sha3
            snyk_report.sarif*

  # Aggregation job: downloads every tool's artifact, verifies checksums,
  # substitutes empty placeholder reports for anything missing, and feeds the
  # verified paths to the SonarQube scanner.
  SonarQube:
    needs: Analyze
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
        with:
          fetch-depth: 0 # full history so SonarQube can compute accurate new-code/blame data

      - name: Download analysis reports
        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
        with:
          path: reports

      - name: Process reports and prepare for SonarQube
        run: |
          mkdir -p processed_reports
          # Verify the SHA3-256 checksum written by the Analyze job.
          # Returns: 0 verified, 1 mismatch, 2 report present but no hash file, 3 report missing.
          verify_hash() {
            local file_path="$1"
            local hash_path="${file_path}.sha3"
            if [ -f "$file_path" ] && [ -f "$hash_path" ]; then
              echo "Verifying hash for $file_path"
              expected_hash=$(cat "$hash_path")
              computed_hash=$(python -c "import hashlib; data = open('$file_path', 'rb').read(); print(hashlib.sha3_256(data).hexdigest())")
              if [ "$expected_hash" = "$computed_hash" ]; then
                echo "✅ Hash verification successful for $file_path"
                return 0
              else
                echo "❌ Hash verification FAILED for $file_path"
                echo "Expected: $expected_hash"
                echo "Computed: $computed_hash"
                return 1
              fi
            elif [ -f "$file_path" ]; then
              echo "⚠️ Warning: No hash file found for $file_path"
              return 2
            else
              echo "⚠️ Warning: File not found: $file_path"
              return 3
            fi
          }
          # Copy a report into processed_reports/ after hash verification.
          # Returns 0 only when a real (verified or hash-less) report was copied;
          # otherwise a placeholder is written and a non-zero status returned.
          safe_move_report() {
            local source_dir="$1"
            local report_file="$2"
            local target_file="$3"
            # First try direct path
            if [ -f "${source_dir}/${report_file}" ]; then
              echo "✅ Found ${report_file}"
              # Verify hash before copying; '|| [ $? -eq 2 ]' also accepts a
              # report that simply has no .sha3 companion file.
              if verify_hash "${source_dir}/${report_file}" || [ $? -eq 2 ]; then
                cp "${source_dir}/${report_file}" "${target_file}"
                return 0
              else
                echo "⚠️ Hash verification failed for ${report_file}, skipping"
                return 4
              fi
            # Handle CodeQL special case - check for python.sarif inside the directory
            elif [[ "${report_file}" == "codeql_report.sarif" ]] && [ -d "${source_dir}/${report_file}" ] && [ -f "${source_dir}/${report_file}/python.sarif" ]; then
              echo "✅ Found ${report_file}/python.sarif (CodeQL directory structure)"
              # Verify hash before copying
              if verify_hash "${source_dir}/${report_file}/python.sarif" || [ $? -eq 2 ]; then
                cp "${source_dir}/${report_file}/python.sarif" "${target_file}"
                return 0
              else
                echo "⚠️ Hash verification failed for ${report_file}/python.sarif, skipping"
                return 4
              fi
            else
              echo "⚠️ Warning: ${report_file} not found in ${source_dir}"
              # Create fallback file...
              # For JSON reports, create an empty valid JSON file
              if [[ "${report_file}" == *".json" ]]; then
                echo "Creating empty JSON file for ${target_file}"
                echo "[]" > "${target_file}"
              # For SARIF reports, create a minimal valid SARIF file
              elif [[ "${report_file}" == *".sarif" ]]; then
                echo "Creating minimal SARIF file for ${target_file}"
                echo '{"version":"2.1.0","runs":[{"tool":{"driver":{"name":"Missing Report","rules":[]}},"results":[]}]}' > "${target_file}"
              # For text reports, create an empty file
              else
                echo "Creating empty file for ${target_file}"
                touch "${target_file}"
              fi
              return 1
            fi
          }
          # Initialize list of available report paths for SonarQube
          sonar_args=""
          # Process each report type; only verified reports are passed on.
          safe_move_report "reports/bandit-report" "bandit_report.json" "processed_reports/bandit_report.json"
          if [ $? -eq 0 ]; then
            sonar_args="${sonar_args} -Dsonar.python.bandit.reportPaths=processed_reports/bandit_report.json"
          fi
          safe_move_report "reports/ruff-report" "ruff_report.json" "processed_reports/ruff_report.json"
          if [ $? -eq 0 ]; then
            sonar_args="${sonar_args} -Dsonar.python.ruff.reportPaths=processed_reports/ruff_report.json"
          fi
          safe_move_report "reports/mypy-report" "mypy_report.txt" "processed_reports/mypy_report.txt"
          if [ $? -eq 0 ]; then
            sonar_args="${sonar_args} -Dsonar.python.mypy.reportPaths=processed_reports/mypy_report.txt"
          fi
          safe_move_report "reports/pylint-report" "pylint_report.json" "processed_reports/pylint_report.json"
          if [ $? -eq 0 ]; then
            sonar_args="${sonar_args} -Dsonar.python.pylint.reportPaths=processed_reports/pylint_report.json"
          fi
          # Process SARIF reports and combine into a single list if both exist
          sarif_reports=""
          safe_move_report "reports/codeql-report" "codeql_report.sarif" "processed_reports/codeql_report.sarif"
          if [ $? -eq 0 ]; then
            sarif_reports="processed_reports/codeql_report.sarif"
          fi
          safe_move_report "reports/snyk-report" "snyk_report.sarif" "processed_reports/snyk_report.sarif"
          if [ $? -eq 0 ]; then
            if [ -n "$sarif_reports" ]; then
              sarif_reports="${sarif_reports},processed_reports/snyk_report.sarif"
            else
              sarif_reports="processed_reports/snyk_report.sarif"
            fi
          fi
          if [ -n "$sarif_reports" ]; then
            sonar_args="${sonar_args} -Dsonar.sarifReportPaths=${sarif_reports}"
          fi
          # Store SonarQube args in environment variable for next step
          echo "SONAR_EXTRA_ARGS=${sonar_args}" >> "$GITHUB_ENV"
          # Print summary
          echo "✨ Report processing complete. SonarQube will use the following reports:"
          echo "${sonar_args}"

      - name: Analyze with SonarQube
        uses: SonarSource/sonarqube-scan-action@aa494459d7c39c106cc77b166de8b4250a32bb97
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
          SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }}
        with:
          args: >
            -Dsonar.projectKey=DavidOsipov_PostQuantum-Feldman-VSS
            -Dsonar.organization=davidosipov
            -Dsonar.python.version=3.10,3.11,3.12,3.13
            -Dsonar.languages=python
            -Dsonar.python.analyzeGeneratedCode=true
            -Dsonar.verbose=false
            ${{ env.SONAR_EXTRA_ARGS }}