Linting with locally installed spectral-cli #9

Workflow file for this run

# Lint OpenAPI files found in /specs and store the result in BigQuery
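# Rough flow (as implemented below): check out the repo, set up Node.js and install the Spectral CLI,
# lint every spec under ./specs, group the findings per spec file into a BigQuery-friendly JSON payload,
# and (in steps that are currently disabled) authenticate to Google Cloud and stream the rows into BigQuery.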
name: Entur/Api/Lint
on: [push]
jobs:
  lint-and-store-result:
    name: OpenAPI Lint
    runs-on: ubuntu-24.04
    environment: dev
    permissions:
      contents: read
      id-token: write
    steps:
      - name: Checkout host repository
        uses: actions/checkout@v4
      # Setup Node.js, install Spectral CLI, and run Spectral Linting
      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '16'
      - name: Install Spectral CLI
        run: npm install -g @stoplight/spectral-cli
      - name: Run Spectral Linting
        id: lint
        run: spectral lint ./specs/* -o lint-result.json -f json --quiet || true
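      # lint-result.json is a JSON array of Spectral findings; each entry carries the rule code,
      # message, numeric severity (0=error, 1=warning, 2=info, 3=hint), source file path, and range.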
      - id: dump-result
        shell: bash
        run: cat lint-result.json
      - name: Transform lint results into grouped JSON
        run: |
          # Get the spec source file paths from lint-result.json
          sources=$(jq -r '.[].source' lint-result.json | sort -u)
          # Initialize an empty array to hold our row objects
          rows_array=()
          # Iterate over each unique source
          while IFS= read -r src; do
            echo "Processing spec file: $src"
            if [[ "$src" == *.json ]]; then
              # If the file is JSON, use jq to extract the service name.
              service=$(jq -r '.info.title' "$src")
            else
              # Otherwise, assume it's YAML and use yq.
              service=$(yq e '.info.title' "$src")
            fi
            # Get rule violations from lint-result.json for entries with this source and map each to {rule, severity}
            results=$(jq --arg src "$src" '[.[] | select(.source == $src) | {rule: .code, severity: .severity}]' lint-result.json)
            # Build a row JSON object with the extracted service name and the result array
            row=$(jq -n --arg service "$service" --argjson results "$results" '{json: {service: $service, result: $results}}')
            # Append the row to our rows_array
            rows_array+=("$row")
          done <<< "$sources"
          # Combine all rows into the final JSON structure
          combined=$(printf '%s\n' "${rows_array[@]}" | jq -s '{rows: .}')
          # Write the output to a file (or you could use it further in your workflow)
          echo "$combined" > transformed.json
          cat transformed.json
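          # transformed.json now has the shape the BigQuery insertAll API expects, e.g.:
          # {"rows": [{"json": {"service": "<info.title>", "result": [{"rule": "<code>", "severity": <0-3>}]}}]}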
      - id: login-gcp
        if: false # disabled for now
        name: Authenticate with Google Cloud
        uses: google-github-actions/auth@v2
        with:
          workload_identity_provider: ${{ vars.WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ vars.SERVICE_ACCOUNT }}
          token_format: 'access_token'
      - id: write-to-bigquery
        if: false # disabled for now
        name: Write to BigQuery
        shell: bash
        run: |
          curl --request POST \
            'https://bigquery.googleapis.com/bigquery/v2/projects/ent-apidata-dev/datasets/api_lint/tables/lint_result/insertAll' \
            --header 'Authorization: Bearer ${{ steps.login-gcp.outputs.access_token }}' \
            --header 'Accept: application/json' \
            --header 'Content-Type: application/json' \
            --data @transformed.json \
            --compressed
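          # This posts the rows from transformed.json to the tabledata.insertAll streaming endpoint
          # for the api_lint.lint_result table; the bearer token comes from the (currently disabled)
          # login-gcp step above.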