# NOTE(review): removed GitHub web-UI scrape residue that preceded this file
# ("Skip to content" navigation text and the run header "external-pentests #112");
# it is not part of the workflow definition.
---
# Scheduled external pentests: ZAP + Nuclei against web apps, Nmap against
# resolved host IPs. Runs daily and on manual dispatch with tunable limits.
name: external-pentests

on:
  schedule:
    # Daily at 00:00 UTC
    - cron: "0 0 * * *"
  workflow_dispatch:
    inputs:
      targets:
        description: "Override target URLs (one per line). Leave empty to use defaults."
        required: false
        default: ""
      nuclei_rate_limit:
        description: "Nuclei requests per second"
        required: false
        default: "5"
      nuclei_concurrency:
        description: "Nuclei template concurrency"
        required: false
        default: "5"
      nuclei_retries:
        description: "Nuclei request retries"
        required: false
        default: "2"
      nuclei_timeout_seconds:
        description: "Nuclei request timeout (seconds)"
        required: false
        default: "10"
      zap_timeout_minutes:
        description: "Per-target ZAP full scan timeout (minutes)"
        required: false
        default: "15"
      instance_host_timeout_minutes:
        description: "Per-host Nmap timeout (minutes)"
        required: false
        default: "45"

# Serialize scans per branch; never cancel an in-flight scan.
concurrency:
  group: external-pentests-${{ github.ref_name }}
  cancel-in-progress: false

permissions:
  contents: read
  security-events: write  # needed for SARIF upload via codeql-action

env:
  # Default target list, used when workflow_dispatch provides no override.
  TARGETS_DEFAULT: |
    https://devsh.eu
    https://www.devsh.eu
    https://blog.devsh.eu
    https://kimai2.devsh.eu
    https://monitoring.devsh.eu
    https://oncall.devsh.eu
    https://flux-hook.devsh.eu
    https://k8s.devsh.eu
jobs:
  # Normalizes the target list once and exposes it as JSON (for the matrix),
  # a newline list, and a count.
  prepare-targets:
    name: Prepare target list
    runs-on: ubuntu-latest
    outputs:
      targets_json: ${{ steps.targets.outputs.targets_json }}
      targets_list: ${{ steps.targets.outputs.list }}
      targets_count: ${{ steps.targets.outputs.count }}
    env:
      TARGETS_INPUT: ${{ github.event.inputs.targets || '' }}
    steps:
      - name: Build target list
        id: targets
        run: |
          set -euo pipefail
          targets_raw="${TARGETS_INPUT}"
          if [[ -z "${targets_raw}" ]]; then
            targets_raw="${TARGETS_DEFAULT}"
          fi
          # Strip CRs, drop blank lines, de-duplicate preserving order.
          echo "${targets_raw}" | tr -d '\r' | sed '/^[[:space:]]*$/d' | awk '!seen[$0]++' > targets.txt
          count="$(wc -l < targets.txt | tr -d ' ')"
          echo "count=${count}" >> "${GITHUB_OUTPUT}"
          echo "list<<EOF" >> "${GITHUB_OUTPUT}"
          cat targets.txt >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
          targets_json="$(python3 - <<'PY'
          import json

          targets = []
          with open("targets.txt", "r", encoding="utf-8") as handle:
              for line in handle:
                  line = line.strip()
                  if line:
                      targets.append(line)
          print(json.dumps(targets))
          PY
          )"
          echo "targets_json=${targets_json}" >> "${GITHUB_OUTPUT}"

  # One matrix job per target: ZAP full scan + Nuclei, gated on high/critical.
  web-pentests:
    name: Web apps (ZAP + Nuclei) / ${{ matrix.target }}
    needs: prepare-targets
    strategy:
      fail-fast: false
      matrix:
        target: ${{ fromJson(needs.prepare-targets.outputs.targets_json) }}
    # Keep prod-only to avoid accidental scans from other branches.
    if: github.ref_name == 'env/prod'
    runs-on: ubuntu-latest
    env:
      PENTESTS_DISCORD_WEBHOOK_URL: ${{ secrets.PENTESTS_DISCORD_WEBHOOK_URL }}
      TARGETS_INPUT: ${{ matrix.target }}
      NUCLEI_RATE_LIMIT: ${{ github.event.inputs.nuclei_rate_limit || '5' }}
      NUCLEI_CONCURRENCY: ${{ github.event.inputs.nuclei_concurrency || '5' }}
      NUCLEI_RETRIES: ${{ github.event.inputs.nuclei_retries || '2' }}
      NUCLEI_TIMEOUT_SECONDS: ${{ github.event.inputs.nuclei_timeout_seconds || '10' }}
      ZAP_TIMEOUT_MINUTES: ${{ github.event.inputs.zap_timeout_minutes || '15' }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - name: Prepare targets
        id: targets
        run: |
          set -euo pipefail
          targets_raw="${TARGETS_INPUT}"
          if [[ -z "${targets_raw}" ]]; then
            targets_raw="${TARGETS_DEFAULT}"
          fi
          echo "${targets_raw}" | tr -d '\r' | sed '/^[[:space:]]*$/d' | awk '!seen[$0]++' > targets.txt
          count="$(wc -l < targets.txt | tr -d ' ')"
          echo "count=${count}" >> "${GITHUB_OUTPUT}"
          echo "list<<EOF" >> "${GITHUB_OUTPUT}"
          cat targets.txt >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
      - name: OWASP ZAP full scan (unauthenticated)
        id: zap
        run: |
          set -euo pipefail
          mkdir -p reports/zap
          high_total=0
          medium_total=0
          low_total=0
          info_total=0
          error_total=0
          high_names=""
          summary_lines=""
          error_targets=""
          while IFS= read -r url; do
            # Filesystem-safe slug derived from the URL.
            slug="$(echo "${url}" | sed -e 's|^https\?://||' -e 's|[^A-Za-z0-9._-]|_|g')"
            out_json="reports/zap/${slug}.json"
            out_html="reports/zap/${slug}.html"
            out_md="reports/zap/${slug}.md"
            out_log="reports/zap/${slug}.log"
            echo "ZAP scan ${url}"
            # ZAP full scan returns non-zero for warnings/failures; we ignore the exit code here
            # and gate explicitly on high-risk findings.
            set +e
            timeout "${ZAP_TIMEOUT_MINUTES}m" docker run --rm \
              -v "${PWD}:/zap/wrk" \
              zaproxy/zap-stable \
              zap-full-scan.py \
              -t "${url}" \
              -J "${out_json}" \
              -r "${out_html}" \
              -w "${out_md}" \
              2>&1 | tee "${out_log}"
            code=${PIPESTATUS[0]}
            set -e
            status="ok"
            if [[ "${code}" -eq 124 ]]; then
              # `timeout` exits 124 on expiry.
              status="timeout"
            elif [[ "${code}" -ge 125 ]]; then
              # 125+ means docker itself failed, not the scan.
              status="docker_error"
            fi
            high_count=0
            medium_count=0
            low_count=0
            info_count=0
            if [[ -s "${out_json}" ]]; then
              counts="$(jq -r '
                def count($risk):
                  [.site[].alerts[]? | select(.riskdesc | startswith($risk))] | length;
                [count("High"), count("Medium"), count("Low"), count("Informational")] | @tsv
              ' "${out_json}" 2>/dev/null || true)"
              if [[ -n "${counts}" ]]; then
                IFS=$'\t' read -r high_count medium_count low_count info_count <<< "${counts}"
              else
                status="bad_json"
              fi
            else
              if [[ "${status}" == "ok" ]]; then
                status="no_report"
              fi
            fi
            high_total=$((high_total + high_count))
            medium_total=$((medium_total + medium_count))
            low_total=$((low_total + low_count))
            info_total=$((info_total + info_count))
            if [[ "${status}" != "ok" ]]; then
              error_total=$((error_total + 1))
              error_targets="${error_targets}${slug}: ${status} (exit=${code})\n"
            fi
            if [[ "${high_count}" -gt 0 && -s "${out_json}" ]]; then
              names="$(jq -r '.site[].alerts[]? | select(.riskdesc | startswith("High")) | .alert' "${out_json}" 2>/dev/null | sort -u | tr '\n' ';' || true)"
              if [[ -n "${names}" ]]; then
                high_names="${high_names}${slug}: ${names}\n"
              fi
            fi
            summary_lines="${summary_lines}${slug}: high=${high_count}, med=${medium_count}, low=${low_count}, info=${info_count}, exit=${code}, status=${status}\n"
          done < targets.txt
          printf "%b" "${summary_lines}" > reports/zap/summary.txt
          if [[ -n "${error_targets}" ]]; then
            printf "%b" "${error_targets}" > reports/zap/errors.txt
          fi
          if [[ -n "${high_names}" ]]; then
            printf "%b" "${high_names}" > reports/zap/high-names.txt
          fi
          echo "high=${high_total}" >> "${GITHUB_OUTPUT}"
          echo "medium=${medium_total}" >> "${GITHUB_OUTPUT}"
          echo "low=${low_total}" >> "${GITHUB_OUTPUT}"
          echo "info=${info_total}" >> "${GITHUB_OUTPUT}"
          echo "errors=${error_total}" >> "${GITHUB_OUTPUT}"
          echo "summary<<EOF" >> "${GITHUB_OUTPUT}"
          printf "%b" "${summary_lines}" >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
          echo "error_targets<<EOF" >> "${GITHUB_OUTPUT}"
          printf "%b" "${error_targets}" >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
          echo "high_names<<EOF" >> "${GITHUB_OUTPUT}"
          printf "%b" "${high_names}" >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
      - name: Nuclei scan (low+)
        id: nuclei
        run: |
          set -euo pipefail
          mkdir -p reports/nuclei
          out_jsonl="reports/nuclei/findings.jsonl"
          out_sarif="reports/nuclei/findings.sarif"
          out_log="reports/nuclei/nuclei.log"
          echo "Nuclei scan started"
          set +e
          docker run --rm \
            -v "${PWD}:/work" \
            -w /work \
            projectdiscovery/nuclei:v3.6.0 \
            -l targets.txt \
            -severity low,medium,high,critical \
            -jle "${out_jsonl}" \
            -se "${out_sarif}" \
            -rl "${NUCLEI_RATE_LIMIT}" \
            -c "${NUCLEI_CONCURRENCY}" \
            -retries "${NUCLEI_RETRIES}" \
            -timeout "${NUCLEI_TIMEOUT_SECONDS}" \
            -no-color \
            2>&1 | tee "${out_log}"
          code=${PIPESTATUS[0]}
          set -e
          findings_total=0
          findings_critical=0
          findings_high=0
          findings_medium=0
          findings_low=0
          findings_info=0
          if [[ -s "${out_jsonl}" ]]; then
            # Tally findings per severity from the JSONL export.
            counts="$(NUCLEI_JSONL="${out_jsonl}" python3 - <<'PY'
          import collections
          import json
          import os

          path = os.environ["NUCLEI_JSONL"]
          counts = collections.Counter()
          total = 0
          with open(path, "r", encoding="utf-8") as handle:
              for line in handle:
                  line = line.strip()
                  if not line:
                      continue
                  total += 1
                  try:
                      obj = json.loads(line)
                  except json.JSONDecodeError:
                      continue
                  sev = (
                      obj.get("info", {}).get("severity")
                      or obj.get("severity")
                      or "unknown"
                  )
                  sev = sev.lower()
                  if sev.startswith("info"):
                      sev = "info"
                  counts[sev] += 1
          print(
              f"{total}\t{counts.get('critical', 0)}\t{counts.get('high', 0)}\t{counts.get('medium', 0)}\t{counts.get('low', 0)}\t{counts.get('info', 0)}"
          )
          PY
          )"
            IFS=$'\t' read -r findings_total findings_critical findings_high findings_medium findings_low findings_info <<< "${counts}"
          fi
          error_excerpt=""
          if [[ -s "${out_log}" ]]; then
            error_excerpt="$(grep -E -i 'error|fatal|panic|could not|failed|timeout' "${out_log}" | head -n 10 || true)"
            if [[ -z "${error_excerpt}" ]]; then
              error_excerpt="$(tail -n 10 "${out_log}" || true)"
            fi
          fi
          if [[ -n "${error_excerpt}" ]]; then
            printf "%b\n" "${error_excerpt}" > reports/nuclei/error-summary.txt
          fi
          summary=""
          if [[ "${findings_total}" -gt 0 ]]; then
            summary="$(head -n 10 "${out_jsonl}" | jq -Rr '
              fromjson?
              | ((.info.severity // .severity // "?")
                + " | "
                + (.["template-id"] // .templateID // "?")
                + " | "
                + (.["matched-at"] // .matchedAt // .host // "?"))
            ')"
          fi
          if [[ -n "${summary}" ]]; then
            printf "%b\n" "${summary}" > reports/nuclei/summary.txt
          fi
          echo "exit_code=${code}" >> "${GITHUB_OUTPUT}"
          echo "findings_total=${findings_total}" >> "${GITHUB_OUTPUT}"
          echo "findings_critical=${findings_critical}" >> "${GITHUB_OUTPUT}"
          echo "findings_high=${findings_high}" >> "${GITHUB_OUTPUT}"
          echo "findings_medium=${findings_medium}" >> "${GITHUB_OUTPUT}"
          echo "findings_low=${findings_low}" >> "${GITHUB_OUTPUT}"
          echo "findings_info=${findings_info}" >> "${GITHUB_OUTPUT}"
          echo "error_excerpt<<EOF" >> "${GITHUB_OUTPUT}"
          echo "${error_excerpt}" >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
          echo "summary<<EOF" >> "${GITHUB_OUTPUT}"
          echo "${summary}" >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
      - name: Upload SARIF (Nuclei)
        if: ${{ always() && hashFiles('reports/nuclei/findings.sarif') != '' }}
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: reports/nuclei/findings.sarif
          # Distinct category per matrix target so uploads do not overwrite
          # each other in code scanning.
          category: nuclei-${{ matrix.target }}
      - name: Fail on high/critical findings
        if: ${{ always() }}
        # Step outputs are passed via env instead of inline ${{ }} expansion:
        # error_targets is multi-line, and inlining outputs into the script
        # body both breaks quoting and is a script-injection vector.
        env:
          NUCLEI_EXIT_CODE: ${{ steps.nuclei.outputs.exit_code }}
          NUCLEI_TOTAL: ${{ steps.nuclei.outputs.findings_total }}
          NUCLEI_CRITICAL: ${{ steps.nuclei.outputs.findings_critical }}
          NUCLEI_HIGH: ${{ steps.nuclei.outputs.findings_high }}
          ZAP_HIGH: ${{ steps.zap.outputs.high }}
          ZAP_ERRORS: ${{ steps.zap.outputs.errors }}
          ZAP_ERROR_TARGETS: ${{ steps.zap.outputs.error_targets }}
        run: |
          set -euo pipefail
          nuclei_exit_code="${NUCLEI_EXIT_CODE}"
          nuclei_total="${NUCLEI_TOTAL}"
          nuclei_critical="${NUCLEI_CRITICAL}"
          nuclei_high="${NUCLEI_HIGH}"
          zap_high="${ZAP_HIGH}"
          zap_error_targets="${ZAP_ERROR_TARGETS}"
          zap_errors="${ZAP_ERRORS}"
          if [[ "${nuclei_exit_code}" != "0" ]]; then
            echo "Nuclei scan failed (exit_code=${nuclei_exit_code})."
            if [[ -s "reports/nuclei/error-summary.txt" ]]; then
              echo "Nuclei error summary:"
              cat reports/nuclei/error-summary.txt
            elif [[ -s "reports/nuclei/nuclei.log" ]]; then
              echo "Nuclei log tail:"
              tail -n 20 reports/nuclei/nuclei.log
            fi
            exit 1
          fi
          if [[ "${zap_errors}" != "0" ]]; then
            echo "ZAP scan had errors (count=${zap_errors})."
            if [[ -n "${zap_error_targets}" ]]; then
              echo "${zap_error_targets}"
            fi
            if [[ -s "reports/zap/errors.txt" ]]; then
              while IFS= read -r entry; do
                slug="${entry%%:*}"
                log="reports/zap/${slug}.log"
                if [[ -s "${log}" ]]; then
                  echo "ZAP ${slug} log tail:"
                  tail -n 3 "${log}"
                fi
              done < reports/zap/errors.txt
            fi
            exit 1
          fi
          if (( nuclei_critical + nuclei_high > 0 )); then
            echo "Found high/critical nuclei findings (critical=${nuclei_critical}, high=${nuclei_high}, total=${nuclei_total})."
            exit 1
          fi
          if (( zap_high > 0 )); then
            echo "Found ${zap_high} high-risk ZAP alerts."
            exit 1
          fi
          echo "No high/critical findings detected."
      - name: Upload pentest reports
        if: ${{ always() }}
        uses: actions/upload-artifact@v4
        with:
          # upload-artifact@v4 rejects duplicate artifact names, so the name
          # must be unique per matrix job, not just per run.
          name: pentest-reports-${{ github.run_id }}-${{ strategy.job-index }}
          path: |
            targets.txt
            reports/**
          if-no-files-found: ignore
      - name: Notify Discord (web pentests)
        if: ${{ always() && env.PENTESTS_DISCORD_WEBHOOK_URL != '' }}
        env:
          JOB_STATUS: ${{ job.status }}
          TARGETS_LIST: ${{ steps.targets.outputs.list }}
          TARGETS_COUNT: ${{ steps.targets.outputs.count }}
          ZAP_HIGH: ${{ steps.zap.outputs.high }}
          ZAP_MEDIUM: ${{ steps.zap.outputs.medium }}
          ZAP_LOW: ${{ steps.zap.outputs.low }}
          ZAP_INFO: ${{ steps.zap.outputs.info }}
          ZAP_ERRORS: ${{ steps.zap.outputs.errors }}
          NUCLEI_EXIT_CODE: ${{ steps.nuclei.outputs.exit_code }}
          NUCLEI_TOTAL: ${{ steps.nuclei.outputs.findings_total }}
          NUCLEI_CRITICAL: ${{ steps.nuclei.outputs.findings_critical }}
          NUCLEI_HIGH: ${{ steps.nuclei.outputs.findings_high }}
          NUCLEI_MEDIUM: ${{ steps.nuclei.outputs.findings_medium }}
          NUCLEI_LOW: ${{ steps.nuclei.outputs.findings_low }}
          NUCLEI_INFO: ${{ steps.nuclei.outputs.findings_info }}
        run: |
          set -euo pipefail
          run_url="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
          trigger="${{ github.event_name }}"
          schedule="${{ github.event.schedule }}"
          branch="${{ github.ref_name }}"
          actor="${{ github.actor }}"
          ts="$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
          trigger_label="${trigger}"
          if [[ "${trigger}" == "schedule" && -n "${schedule}" ]]; then
            trigger_label="cron (${schedule})"
          elif [[ "${trigger}" == "workflow_dispatch" ]]; then
            trigger_label="dispatch"
          fi
          status="${JOB_STATUS}"
          color=9807270
          title="Web pentests ${status}"
          if [[ "${status}" == "success" ]]; then
            color=3066993
            title="Web pentests passed"
          else
            color=15158332
            title="Web pentests failed"
          fi
          # Backtick-wrap each target, join to one line, cap field length for Discord.
          targets_display="$(echo "${TARGETS_LIST}" | sed -e 's/^/`/' -e 's/$/`/' | tr '\n' ' ' | sed -e 's/ */ /g' | cut -c1-900)"
          nuclei_summary=""
          if [[ -s "reports/nuclei/summary.txt" ]]; then
            nuclei_summary="$(printf '```%s```' "$(sed -e 's/\r$//' "reports/nuclei/summary.txt" | head -n 10 | cut -c1-900)")"
          fi
          nuclei_error_excerpt=""
          if [[ -s "reports/nuclei/error-summary.txt" ]]; then
            nuclei_error_excerpt="$(printf '```%s```' "$(sed -e 's/\r$//' "reports/nuclei/error-summary.txt" | head -n 10 | cut -c1-900)")"
          fi
          zap_high_names=""
          if [[ -s "reports/zap/high-names.txt" ]]; then
            zap_high_names="$(printf '```%s```' "$(sed -e 's/\r$//' "reports/zap/high-names.txt" | head -n 10 | cut -c1-900)")"
          fi
          zap_summary=""
          if [[ -s "reports/zap/summary.txt" ]]; then
            zap_summary="$(printf '```%s```' "$(sed -e 's/\r$//' "reports/zap/summary.txt" | head -n 10 | cut -c1-900)")"
          fi
          zap_error_targets=""
          if [[ -s "reports/zap/errors.txt" ]]; then
            zap_error_targets="$(printf '```%s```' "$(sed -e 's/\r$//' "reports/zap/errors.txt" | head -n 10 | cut -c1-900)")"
          fi
          payload="$(jq -n \
            --arg title "${title}" \
            --arg status "${status}" \
            --arg trigger "${trigger_label}" \
            --arg branch "${branch}" \
            --arg actor "${actor}" \
            --arg run_url "${run_url}" \
            --arg timestamp "${ts}" \
            --arg targets "${targets_display}" \
            --arg targets_count "${TARGETS_COUNT}" \
            --arg zap_high "${ZAP_HIGH}" \
            --arg zap_medium "${ZAP_MEDIUM}" \
            --arg zap_low "${ZAP_LOW}" \
            --arg zap_info "${ZAP_INFO}" \
            --arg zap_errors "${ZAP_ERRORS}" \
            --arg nuclei_total "${NUCLEI_TOTAL}" \
            --arg nuclei_critical "${NUCLEI_CRITICAL}" \
            --arg nuclei_high "${NUCLEI_HIGH}" \
            --arg nuclei_medium "${NUCLEI_MEDIUM}" \
            --arg nuclei_low "${NUCLEI_LOW}" \
            --arg nuclei_info "${NUCLEI_INFO}" \
            --arg nuclei_exit_code "${NUCLEI_EXIT_CODE}" \
            --arg nuclei_summary "${nuclei_summary}" \
            --arg zap_high_names "${zap_high_names}" \
            --argjson color "${color}" \
            '{
              username: "TerraInfra",
              embeds: [
                {
                  title: $title,
                  color: $color,
                  fields: [
                    {name: "Status", value: $status, inline: true},
                    {name: "Trigger", value: $trigger, inline: true},
                    {name: "Branch", value: $branch, inline: true},
                    {name: "Actor", value: $actor, inline: true},
                    {name: "Targets", value: ("(" + $targets_count + ") " + $targets), inline: false},
                    {name: "Nuclei totals", value: ("crit=" + $nuclei_critical + ", high=" + $nuclei_high + ", med=" + $nuclei_medium + ", low=" + $nuclei_low + ", info=" + $nuclei_info + ", total=" + $nuclei_total + ", exit=" + $nuclei_exit_code), inline: true},
                    {name: "ZAP totals", value: ("high=" + $zap_high + ", med=" + $zap_medium + ", low=" + $zap_low + ", info=" + $zap_info + ", errors=" + $zap_errors), inline: true},
                    {name: "GitHub run", value: ("[Open run](" + $run_url + ")"), inline: true}
                  ],
                  timestamp: $timestamp
                }
              ]
            }')"
          if [[ -n "${zap_summary}" ]]; then
            payload="$(echo "${payload}" | jq -c \
              --arg zap_summary "${zap_summary}" \
              '.embeds[0].fields += [{name: "ZAP per target", value: $zap_summary, inline: false}]'
            )"
          fi
          if [[ "${status}" != "success" ]]; then
            payload="$(echo "${payload}" | jq -c \
              --arg nuclei_summary "${nuclei_summary}" \
              --arg zap_high_names "${zap_high_names}" \
              --arg nuclei_error_excerpt "${nuclei_error_excerpt}" \
              --arg zap_error_targets "${zap_error_targets}" \
              '
              (if ($nuclei_summary | length) > 0 then
                .embeds[0].fields += [{name: "Top Nuclei findings", value: $nuclei_summary, inline: false}]
              else . end) |
              (if ($zap_high_names | length) > 0 then
                .embeds[0].fields += [{name: "ZAP high alerts", value: $zap_high_names, inline: false}]
              else . end) |
              (if ($nuclei_error_excerpt | length) > 0 then
                .embeds[0].fields += [{name: "Nuclei errors", value: $nuclei_error_excerpt, inline: false}]
              else . end) |
              (if ($zap_error_targets | length) > 0 then
                .embeds[0].fields += [{name: "ZAP error targets", value: $zap_error_targets, inline: false}]
              else . end)
              ')"
          fi
          curl -fsS -H 'Content-Type: application/json' -d "${payload}" "${PENTESTS_DISCORD_WEBHOOK_URL}" || echo "Discord webhook notification failed" >&2

  # Resolves target hostnames to IPv4 addresses and runs a full-port Nmap scan.
  host-pentests:
    name: Public hosts (Nmap full ports)
    needs: prepare-targets
    if: github.ref_name == 'env/prod'
    runs-on: ubuntu-latest
    env:
      PENTESTS_DISCORD_WEBHOOK_URL: ${{ secrets.PENTESTS_DISCORD_WEBHOOK_URL }}
      TARGETS_INPUT: ${{ needs.prepare-targets.outputs.targets_list }}
      INSTANCE_HOST_TIMEOUT_MINUTES: ${{ github.event.inputs.instance_host_timeout_minutes || '45' }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - name: Prepare targets
        id: targets
        run: |
          set -euo pipefail
          targets_raw="${TARGETS_INPUT}"
          if [[ -z "${targets_raw}" ]]; then
            targets_raw="${TARGETS_DEFAULT}"
          fi
          echo "${targets_raw}" | tr -d '\r' | sed '/^[[:space:]]*$/d' | awk '!seen[$0]++' > targets.txt
          count="$(wc -l < targets.txt | tr -d ' ')"
          echo "count=${count}" >> "${GITHUB_OUTPUT}"
          echo "list<<EOF" >> "${GITHUB_OUTPUT}"
          cat targets.txt >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
      - name: Resolve instance IPs
        id: ips
        run: |
          set -euo pipefail
          mkdir -p reports/instance
          # Resolve each target's hostname; collect unique IPv4s and a
          # host -> IPs map, recording per-host failures separately.
          python3 - <<'PY'
          import socket
          from urllib.parse import urlparse

          targets = []
          with open("targets.txt", "r", encoding="utf-8") as handle:
              for line in handle:
                  line = line.strip()
                  if line:
                      targets.append(line)

          ip_set = set()
          resolve_map = []
          errors = []
          for url in targets:
              parsed = urlparse(url)
              host = parsed.hostname or url.split("/")[0]
              if not host:
                  errors.append(f"{url}: invalid_host")
                  resolve_map.append(f"{url}: invalid_host")
                  continue
              try:
                  infos = socket.getaddrinfo(host, None)
              except Exception as exc:
                  errors.append(f"{host}: {exc}")
                  resolve_map.append(f"{host}: unresolved")
                  continue
              # IPv4 only: addresses containing ':' are IPv6.
              ips = sorted({info[4][0] for info in infos if ":" not in info[4][0]})
              if not ips:
                  errors.append(f"{host}: no_ipv4")
                  resolve_map.append(f"{host}: no_ipv4")
                  continue
              for ip in ips:
                  ip_set.add(ip)
              resolve_map.append(f"{host}: {', '.join(ips)}")

          with open("instance_targets.txt", "w", encoding="utf-8") as handle:
              for ip in sorted(ip_set):
                  handle.write(f"{ip}\n")
          with open("reports/instance/resolve-map.txt", "w", encoding="utf-8") as handle:
              handle.write("\n".join(resolve_map))
              handle.write("\n")
          if errors:
              with open("reports/instance/resolve-errors.txt", "w", encoding="utf-8") as handle:
                  handle.write("\n".join(errors))
                  handle.write("\n")
          PY
          if [[ ! -s "instance_targets.txt" ]]; then
            echo "No instance targets resolved."
            exit 1
          fi
          count="$(wc -l < instance_targets.txt | tr -d ' ')"
          echo "count=${count}" >> "${GITHUB_OUTPUT}"
          echo "list<<EOF" >> "${GITHUB_OUTPUT}"
          cat instance_targets.txt >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
          if [[ -s "reports/instance/resolve-errors.txt" ]]; then
            echo "resolve_errors<<EOF" >> "${GITHUB_OUTPUT}"
            cat reports/instance/resolve-errors.txt >> "${GITHUB_OUTPUT}"
            echo "EOF" >> "${GITHUB_OUTPUT}"
          else
            echo "resolve_errors=" >> "${GITHUB_OUTPUT}"
          fi
      - name: Nmap scan (external)
        id: nmap
        run: |
          set -euo pipefail
          sudo apt-get update
          sudo apt-get install -y nmap
          mkdir -p reports/instance
          error_total=0
          error_targets=""
          while IFS= read -r ip; do
            out_base="reports/instance/${ip}"
            set +e
            # Full TCP port range with service/script detection, bounded per host.
            sudo nmap -Pn -sV -sC -p- -T4 --host-timeout "${INSTANCE_HOST_TIMEOUT_MINUTES}m" \
              -oX "${out_base}.xml" \
              -oN "${out_base}.nmap" \
              "${ip}"
            code=$?
            set -e
            if [[ "${code}" -ne 0 ]]; then
              error_total=$((error_total + 1))
              error_targets="${error_targets}${ip}: exit=${code}\n"
            fi
          done < instance_targets.txt
          if [[ -n "${error_targets}" ]]; then
            printf "%b" "${error_targets}" > reports/instance/errors.txt
          fi
          # Summarize open ports per host from the XML reports.
          python3 - <<'PY'
          import glob
          import os
          import xml.etree.ElementTree as ET

          lines = []
          for path in sorted(glob.glob("reports/instance/*.xml")):
              try:
                  tree = ET.parse(path)
              except Exception:
                  lines.append(f"{os.path.basename(path)}: parse_error")
                  continue
              root = tree.getroot()
              host = root.find("host")
              if host is None:
                  lines.append(f"{os.path.basename(path)}: no_host")
                  continue
              addr_el = host.find("address[@addrtype='ipv4']")
              addr = addr_el.get("addr") if addr_el is not None else os.path.basename(path)
              status_el = host.find("status")
              status = status_el.get("state") if status_el is not None else "unknown"
              ports = []
              for port in host.findall("ports/port"):
                  state_el = port.find("state")
                  if state_el is None or state_el.get("state") != "open":
                      continue
                  service_el = port.find("service")
                  service = service_el.get("name") if service_el is not None else "unknown"
                  ports.append(f"{port.get('portid')}/{port.get('protocol')}({service})")
              if status != "up":
                  line = f"{addr}: status={status}"
              elif ports:
                  line = f"{addr}: " + ", ".join(ports)
              else:
                  line = f"{addr}: no open ports"
              lines.append(line)
          if not lines:
              lines.append("No instance scan reports generated.")
          with open("reports/instance/summary.txt", "w", encoding="utf-8") as handle:
              handle.write("\n".join(lines))
              handle.write("\n")
          PY
          echo "errors=${error_total}" >> "${GITHUB_OUTPUT}"
          echo "summary<<EOF" >> "${GITHUB_OUTPUT}"
          cat reports/instance/summary.txt >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
          echo "error_targets<<EOF" >> "${GITHUB_OUTPUT}"
          printf "%b" "${error_targets}" >> "${GITHUB_OUTPUT}"
          echo "EOF" >> "${GITHUB_OUTPUT}"
      - name: Fail on scan errors
        if: ${{ always() }}
        # Output passed via env to avoid inline ${{ }} expansion in the script.
        env:
          NMAP_ERRORS: ${{ steps.nmap.outputs.errors }}
        run: |
          set -euo pipefail
          errors="${NMAP_ERRORS}"
          if [[ "${errors}" != "0" ]]; then
            echo "Nmap had errors (count=${errors})."
            exit 1
          fi
      - name: Upload instance reports
        if: ${{ always() }}
        uses: actions/upload-artifact@v4
        with:
          name: instance-pentest-reports-${{ github.run_id }}
          path: |
            targets.txt
            instance_targets.txt
            reports/instance/**
          if-no-files-found: ignore
      - name: Notify Discord (host pentest)
        if: ${{ always() && env.PENTESTS_DISCORD_WEBHOOK_URL != '' }}
        env:
          JOB_STATUS: ${{ job.status }}
          TARGETS_LIST: ${{ steps.targets.outputs.list }}
          TARGETS_COUNT: ${{ steps.targets.outputs.count }}
          IPS_LIST: ${{ steps.ips.outputs.list }}
          IPS_COUNT: ${{ steps.ips.outputs.count }}
          RESOLVE_ERRORS: ${{ steps.ips.outputs.resolve_errors }}
          NMAP_ERRORS: ${{ steps.nmap.outputs.errors }}
          NMAP_ERROR_TARGETS: ${{ steps.nmap.outputs.error_targets }}
          NMAP_SUMMARY: ${{ steps.nmap.outputs.summary }}
        run: |
          set -euo pipefail
          run_url="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
          trigger="${{ github.event_name }}"
          schedule="${{ github.event.schedule }}"
          branch="${{ github.ref_name }}"
          actor="${{ github.actor }}"
          ts="$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
          trigger_label="${trigger}"
          if [[ "${trigger}" == "schedule" && -n "${schedule}" ]]; then
            trigger_label="cron (${schedule})"
          elif [[ "${trigger}" == "workflow_dispatch" ]]; then
            trigger_label="dispatch"
          fi
          status="${JOB_STATUS}"
          color=9807270
          title="Host pentest ${status}"
          if [[ "${status}" == "success" ]]; then
            color=3066993
            title="Host pentest passed"
          else
            color=15158332
            title="Host pentest failed"
          fi
          ips_display="$(echo "${IPS_LIST}" | sed -e 's/^/`/' -e 's/$/`/' | tr '\n' ' ' | sed -e 's/ */ /g' | cut -c1-900)"
          summary_block=""
          if [[ -n "${NMAP_SUMMARY}" ]]; then
            summary_block="$(printf '```%s```' "$(echo "${NMAP_SUMMARY}" | sed -e 's/\r$//' | head -n 10 | cut -c1-900)")"
          fi
          resolve_errors_block=""
          if [[ -n "${RESOLVE_ERRORS}" ]]; then
            resolve_errors_block="$(printf '```%s```' "$(echo "${RESOLVE_ERRORS}" | sed -e 's/\r$//' | head -n 10 | cut -c1-900)")"
          fi
          nmap_error_targets_block=""
          if [[ -n "${NMAP_ERROR_TARGETS}" ]]; then
            nmap_error_targets_block="$(printf '```%s```' "$(echo "${NMAP_ERROR_TARGETS}" | sed -e 's/\r$//' | head -n 10 | cut -c1-900)")"
          fi
          payload="$(jq -n \
            --arg title "${title}" \
            --arg status "${status}" \
            --arg trigger "${trigger_label}" \
            --arg branch "${branch}" \
            --arg actor "${actor}" \
            --arg run_url "${run_url}" \
            --arg timestamp "${ts}" \
            --arg ips "${ips_display}" \
            --arg ips_count "${IPS_COUNT}" \
            --arg nmap_errors "${NMAP_ERRORS}" \
            --argjson color "${color}" \
            '{
              username: "TerraInfra",
              embeds: [
                {
                  title: $title,
                  color: $color,
                  fields: [
                    {name: "Status", value: $status, inline: true},
                    {name: "Trigger", value: $trigger, inline: true},
                    {name: "Branch", value: $branch, inline: true},
                    {name: "Actor", value: $actor, inline: true},
                    {name: "Instance IPs", value: ("(" + $ips_count + ") " + $ips), inline: false},
                    {name: "Nmap errors", value: ("errors=" + $nmap_errors), inline: true},
                    {name: "GitHub run", value: ("[Open run](" + $run_url + ")"), inline: true}
                  ],
                  timestamp: $timestamp
                }
              ]
            }')"
          if [[ -n "${summary_block}" ]]; then
            payload="$(echo "${payload}" | jq -c \
              --arg summary_block "${summary_block}" \
              '.embeds[0].fields += [{name: "Open ports (top 10)", value: $summary_block, inline: false}]'
            )"
          fi
          if [[ "${status}" != "success" ]]; then
            payload="$(echo "${payload}" | jq -c \
              --arg resolve_errors_block "${resolve_errors_block}" \
              --arg nmap_error_targets_block "${nmap_error_targets_block}" \
              '
              (if ($resolve_errors_block | length) > 0 then
                .embeds[0].fields += [{name: "Resolve errors", value: $resolve_errors_block, inline: false}]
              else . end) |
              (if ($nmap_error_targets_block | length) > 0 then
                .embeds[0].fields += [{name: "Nmap error targets", value: $nmap_error_targets_block, inline: false}]
              else . end)
              ')"
          fi
          curl -fsS -H 'Content-Type: application/json' -d "${payload}" "${PENTESTS_DISCORD_WEBHOOK_URL}" || echo "Discord webhook notification failed" >&2