Compare commits
27 Commits
b00aeb5580
...
pipeline/d
| Author | SHA1 | Date | |
|---|---|---|---|
| 4d945aabab | |||
| ade401d9e6 | |||
| adf290e4ac | |||
| 0072307bec | |||
| d355e7e6a9 | |||
| 8131550603 | |||
| 2e3500f00a | |||
| eab112fa03 | |||
| 733bdd401a | |||
| 9025ab694d | |||
| 34e85bb6cd | |||
| 623c81d406 | |||
| 92d7a19186 | |||
| 979e35d680 | |||
| 2316af824d | |||
| 346e974be6 | |||
| d8a4316d32 | |||
| aa514869bc | |||
| 5171dc7d99 | |||
| d36a1e7655 | |||
| e3ed622ade | |||
| 1e7013269e | |||
| 45fd4454fa | |||
| 5870ab952f | |||
| 54615ec19a | |||
| e255600a93 | |||
| 8d012b8085 |
20
.env.example
Normal file
20
.env.example
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# MaxMind (create a free GeoLite2 license key in your MaxMind account)
|
||||||
|
MAXMIND_LICENSE_KEY=your_maxmind_license_key
|
||||||
|
|
||||||
|
# PeeringDB (optional; reduces rate limits)
|
||||||
|
PDB_API_KEY=your_peeringdb_api_key
|
||||||
|
|
||||||
|
# existing Traefik/proxy network name (must already exist)
|
||||||
|
PROXY_NETWORK=proxy
|
||||||
|
|
||||||
|
# update interval in seconds (30 days)
|
||||||
|
UPDATE_INTERVAL_SECONDS=2592000
|
||||||
|
|
||||||
|
# PeeringDB info_type filter (should match PeeringDB values)
|
||||||
|
PDB_INFO_TYPE=Educational/Research
|
||||||
|
|
||||||
|
# minimum ASN entries for healthy /healthz
|
||||||
|
MIN_ASN_COUNT=10
|
||||||
|
|
||||||
|
# retry interval after a failed update (e.g., MaxMind 429)
|
||||||
|
RETRY_SECONDS=3600
|
||||||
26
.gitignore
vendored
26
.gitignore
vendored
@@ -1,27 +1 @@
|
|||||||
# ---> Go
|
|
||||||
# If you prefer the allow list template instead of the deny list, see community template:
|
|
||||||
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
|
|
||||||
#
|
|
||||||
# Binaries for programs and plugins
|
|
||||||
*.exe
|
|
||||||
*.exe~
|
|
||||||
*.dll
|
|
||||||
*.so
|
|
||||||
*.dylib
|
|
||||||
|
|
||||||
# Test binary, built with `go test -c`
|
|
||||||
*.test
|
|
||||||
|
|
||||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
|
||||||
*.out
|
|
||||||
|
|
||||||
# Dependency directories (remove the comment below to include it)
|
|
||||||
# vendor/
|
|
||||||
|
|
||||||
# Go workspace file
|
|
||||||
go.work
|
|
||||||
go.work.sum
|
|
||||||
|
|
||||||
# env file
|
|
||||||
.env
|
.env
|
||||||
|
|
||||||
|
|||||||
17
Dockerfile
Normal file
17
Dockerfile
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
# ---- Build stage: compile the Go service as a static binary ----
FROM golang:1.22-alpine AS build
WORKDIR /src
COPY go.mod ./
COPY go.sum ./
RUN go mod download
COPY main.go ./
# CGO disabled so the binary runs on plain alpine without libc shims;
# -trimpath and -s -w keep the binary small and path-free.
RUN CGO_ENABLED=0 go build -trimpath -ldflags="-s -w" -o /out/asn-header-service

# ---- Runtime stage: minimal alpine, non-root user ----
FROM alpine:3.20
# wget is used by the docker-compose healthcheck (GET /healthz).
RUN apk add --no-cache wget
# Dedicated unprivileged user: no password (-D), no home dir (-H), fixed UID.
RUN adduser -D -H -u 10001 app
USER 10001
WORKDIR /app
COPY --from=build /out/asn-header-service /app/asn-header-service
EXPOSE 8080
# Listen address consumed by the service binary.
ENV ADDR=:8080
ENTRYPOINT ["/app/asn-header-service"]
|
||||||
111
Jenkinsfile
vendored
Normal file
111
Jenkinsfile
vendored
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
pipeline {
    agent any

    environment {
        GIT_URL = 'https://gitea.mindboost.team/mindboost/education-flagger.git'
        GIT_BRANCH = 'pipeline/deploy-image'
        REGISTRY_SCHEME = 'https'
        REGISTRY_AUTHORITY = 'gitea.mindboost.team'
        IMAGE_NAME = 'mindboost/education-flagger'
        REGISTRY_CREDENTIALS_ID = 'REGISTRY_CREDENTIALS_ID'
    }

    stages {
        stage('Checkout') {
            steps {
                script {
                    checkout([
                        $class: 'GitSCM',
                        branches: [[name: "*/${env.GIT_BRANCH}"]],
                        userRemoteConfigs: [[
                            url: env.GIT_URL,
                            credentialsId: 'b5f383be-8c74-40f9-b7e1-3a9c5856df0e'
                        ]]
                    ])
                }
            }
        }

        stage('Check Repository') {
            steps {
                script {
                    // Debug aid: show where we are and what was checked out.
                    sh 'pwd'
                    sh 'ls -la'
                    sh 'git status'
                }
            }
        }

        stage('Determine Version') {
            steps {
                script {
                    def fullHash = sh(
                        script: 'git rev-parse HEAD',
                        returnStdout: true
                    ).trim()
                    // NOTE(review): the value is a git SHA-1; the "sha256-"
                    // prefix is only a tag naming convention here. Kept as-is
                    // because downstream deployments reference these tags.
                    env.IMAGE_TAG = "sha256-${fullHash}"
                    echo "Resolved image tag: ${env.IMAGE_TAG}"
                }
            }
        }

        stage('Check Docker Image with the same tag') {
            steps {
                script {
                    // Only inspects the local daemon's image cache, not the
                    // remote registry.
                    def imageExists = sh(
                        script: "docker images -q ${env.IMAGE_NAME}:${env.IMAGE_TAG} || true",
                        returnStdout: true
                    ).trim()

                    if (imageExists) {
                        echo "Docker Image mit Tag ${env.IMAGE_TAG} existiert bereits. Überspringe Build."
                        currentBuild.result = 'SUCCESS'
                        return
                    } else {
                        echo "Kein vorhandenes Docker Image gefunden. Baue neues Image..."
                    }
                }
            }
        }

        stage('Build Docker Image') {
            when {
                // Skipped when the check stage already marked the build SUCCESS.
                expression { currentBuild.result == null }
            }
            steps {
                script {
                    sh "docker build --rm -t ${env.IMAGE_NAME}:${env.IMAGE_TAG} ."
                }
            }
        }

        stage('Push Docker Image') {
            when {
                expression { currentBuild.result == null }
            }
            steps {
                script {
                    withCredentials([usernamePassword(
                        credentialsId: env.REGISTRY_CREDENTIALS_ID,
                        usernameVariable: 'REGISTRY_USER',
                        passwordVariable: 'REGISTRY_PASS'
                    )]) {
                        // Single-quoted Groovy string so the SHELL expands the
                        // credential variables: Groovy interpolation of secrets
                        // ("${REGISTRY_PASS}") leaks them into the build log
                        // and the process argument list.
                        sh 'echo "$REGISTRY_PASS" | docker login "$REGISTRY_AUTHORITY" -u "$REGISTRY_USER" --password-stdin'
                        sh "docker tag ${env.IMAGE_NAME}:${env.IMAGE_TAG} ${env.REGISTRY_AUTHORITY}/${env.IMAGE_NAME}:${env.IMAGE_TAG}"
                        sh "docker push ${env.REGISTRY_AUTHORITY}/${env.IMAGE_NAME}:${env.IMAGE_TAG}"
                        sh "docker logout ${env.REGISTRY_AUTHORITY}"
                    }
                }
            }
        }

        stage('Cleanup Docker Images') {
            steps {
                script {
                    sh 'set -eux; docker image prune -f; docker builder prune -f'
                }
            }
        }
    }
}
|
||||||
120
README.md
120
README.md
@@ -1,4 +1,118 @@
|
|||||||
# education-flagger
|
# NREN / ASN Detection Service
|
||||||
|
|
||||||
Dieses Repo nutzt MaxMind und damit indirekt die Datenbank von PeeringDB, um HTTP-Anfragen in kurzer Zeit mit Headern zu versehen, die Aufschluss darüber geben, ob sich der Client in einem Research-and-Education-Netzwerk (Eduroam) von DFN, GÉANT, SWITCH oder RENATER befindet.
|
Dieses Projekt stellt einen **minimalen Microservice** bereit, um **Hochschul- und Forschungsnetzwerke (NRENs)** anhand der **Autonomous System Number (ASN)** zu erkennen.
|
||||||
Forschung und Bildung sind die wichtigste Investition in die Zukunft.
|
|
||||||
|
Der Zweck ist es, **Anfragen aus Hochschulnetzen (z. B. eduroam)** zu identifizieren, um **Research-bezogene Services kostenlos oder bevorzugt bereitzustellen**.
|
||||||
|
|
||||||
|
Das System dient ausschließlich der **Netzwerk-Klassifikation** und **ersetzt keine Authentifizierung**.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Ziel
|
||||||
|
|
||||||
|
- Erkennen, ob eine Anfrage aus einem **Hochschul- oder Forschungsnetz** stammt
|
||||||
|
- Bereitstellung eines **Header-Hinweises** für nachgelagerte Services
|
||||||
|
- Grundlage für Entscheidungen wie:
|
||||||
|
- kostenfreie Research-Features
|
||||||
|
- angepasste UI-Hinweise
|
||||||
|
- alternative Rate-Limits
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Funktionsweise (Kurzfassung)
|
||||||
|
|
||||||
|
```
|
||||||
|
Client
|
||||||
|
→ Traefik
|
||||||
|
→ ForwardAuth
|
||||||
|
→ ASN Detection Service
|
||||||
|
→ Header wird ergänzt
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Die Client-IP wird ermittelt
|
||||||
|
2. Die zugehörige ASN wird lokal nachgeschlagen
|
||||||
|
3. Die ASN wird mit einer NREN-ASN-Liste verglichen
|
||||||
|
4. Das Ergebnis wird als HTTP-Header zurückgegeben
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Datenquellen
|
||||||
|
|
||||||
|
- **GeoLite2 ASN (MaxMind)**
|
||||||
|
- kostenlos
|
||||||
|
- lokal
|
||||||
|
- monatliche Aktualisierung
|
||||||
|
|
||||||
|
- **NREN-ASN-Liste**
|
||||||
|
- abgeleitet aus PeeringDB
|
||||||
|
- Kategorie: `Research and Education`
|
||||||
|
- monatliche Aktualisierung
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Bereitgestellte Header
|
||||||
|
|
||||||
|
| Header | Beschreibung |
|
||||||
|
|------|-------------|
|
||||||
|
| `X-ASN` | ASN der Client-IP |
|
||||||
|
| `X-ASN-ORG` | Organisation (optional) |
|
||||||
|
| `X-NREN` | `1` wenn ASN zu einem Hochschul-/Forschungsnetz gehört, sonst `0` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Domain-Lookup (optional)
|
||||||
|
|
||||||
|
Für die Validierung von Institutions-Domains kann ein Lookup genutzt werden:
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /lookup?domain=uni-stuttgart.de
|
||||||
|
```
|
||||||
|
|
||||||
|
Antwort (JSON):
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"domain": "uni-stuttgart.de",
|
||||||
|
"nren": true,
|
||||||
|
"asn": 12345,
|
||||||
|
"asn_org": "Universitaet Stuttgart",
|
||||||
|
"ips": ["129.69.1.1"],
|
||||||
|
"matched_ip": "129.69.1.1"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Integration
|
||||||
|
|
||||||
|
Der Service wird als **Traefik ForwardAuth Middleware** eingebunden.
|
||||||
|
Die Header werden über `authResponseHeaders` an die eigentliche Anwendung weitergereicht.
|
||||||
|
|
||||||
|
Der Service ist **nicht öffentlich exponiert** und kommuniziert ausschließlich über das interne Docker-Netzwerk.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Update-Strategie
|
||||||
|
|
||||||
|
- monatliche Aktualisierung der ASN-Daten
|
||||||
|
- keine externen Requests während der Anfrageverarbeitung
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Healthcheck
|
||||||
|
|
||||||
|
- `GET /healthz` liefert `200`, wenn mindestens `MIN_ASN_COUNT` ASNs geladen sind
|
||||||
|
- Standard: `MIN_ASN_COUNT=10` (konfigurierbar via Env)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Einschränkungen
|
||||||
|
|
||||||
|
- Die Erkennung ist **heuristisch**
|
||||||
|
- Es gibt **keine Garantie**, dass jede Anfrage aus einem Hochschulnetz erkannt wird
|
||||||
|
- Die Information darf **nicht als Authentifizierungsmerkmal** verwendet werden
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Zusammenfassung
|
||||||
|
|
||||||
|
Dieses Projekt ermöglicht eine **performante, datenschutzfreundliche Erkennung von Hochschulnetzen**, um **Research-Angebote kontextabhängig bereitzustellen**, ohne Nutzer zu identifizieren oder externe Dienste zur Laufzeit zu kontaktieren.
|
||||||
|
|||||||
118
README_technical.md
Normal file
118
README_technical.md
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
# NREN / ASN Detection Service
|
||||||
|
|
||||||
|
Dieses Projekt stellt einen **minimalen Microservice** bereit, um **Hochschul- und Forschungsnetzwerke (NRENs)** anhand der **Autonomous System Number (ASN)** zu erkennen.
|
||||||
|
|
||||||
|
Der Zweck ist es, **Anfragen aus Hochschulnetzen (z. B. eduroam)** zu identifizieren, um **Research-bezogene Services kostenlos oder bevorzugt bereitzustellen**.
|
||||||
|
|
||||||
|
Das System dient ausschließlich der **Netzwerk-Klassifikation** und **ersetzt keine Authentifizierung**.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Ziel
|
||||||
|
|
||||||
|
- Erkennen, ob eine Anfrage aus einem **Hochschul- oder Forschungsnetz** stammt
|
||||||
|
- Bereitstellung eines **Header-Hinweises** für nachgelagerte Services
|
||||||
|
- Grundlage für Entscheidungen wie:
|
||||||
|
- kostenfreie Research-Features
|
||||||
|
- angepasste UI-Hinweise
|
||||||
|
- alternative Rate-Limits
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Funktionsweise (Kurzfassung)
|
||||||
|
|
||||||
|
```
|
||||||
|
Client
|
||||||
|
→ Traefik
|
||||||
|
→ ForwardAuth
|
||||||
|
→ ASN Detection Service
|
||||||
|
→ Header wird ergänzt
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Die Client-IP wird ermittelt
|
||||||
|
2. Die zugehörige ASN wird lokal nachgeschlagen
|
||||||
|
3. Die ASN wird mit einer NREN-ASN-Liste verglichen
|
||||||
|
4. Das Ergebnis wird als HTTP-Header zurückgegeben
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Datenquellen
|
||||||
|
|
||||||
|
- **GeoLite2 ASN (MaxMind)**
|
||||||
|
- kostenlos
|
||||||
|
- lokal
|
||||||
|
- monatliche Aktualisierung
|
||||||
|
|
||||||
|
- **NREN-ASN-Liste**
|
||||||
|
- abgeleitet aus PeeringDB
|
||||||
|
- Kategorie: `Research and Education`
|
||||||
|
- monatliche Aktualisierung
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Bereitgestellte Header
|
||||||
|
|
||||||
|
| Header | Beschreibung |
|
||||||
|
|------|-------------|
|
||||||
|
| `X-ASN` | ASN der Client-IP |
|
||||||
|
| `X-ASN-ORG` | Organisation (optional) |
|
||||||
|
| `X-NREN` | `1` wenn ASN zu einem Hochschul-/Forschungsnetz gehört, sonst `0` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Domain-Lookup (optional)
|
||||||
|
|
||||||
|
Für Backend-Validierung von Institutions-Domains:
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /lookup?domain=uni-stuttgart.de
|
||||||
|
```
|
||||||
|
|
||||||
|
Antwort (JSON):
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"domain": "uni-stuttgart.de",
|
||||||
|
"nren": true,
|
||||||
|
"asn": 12345,
|
||||||
|
"asn_org": "Universitaet Stuttgart",
|
||||||
|
"ips": ["129.69.1.1"],
|
||||||
|
"matched_ip": "129.69.1.1"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Integration
|
||||||
|
|
||||||
|
Der Service wird als **Traefik ForwardAuth Middleware** eingebunden.
|
||||||
|
Die Header werden über `authResponseHeaders` an die eigentliche Anwendung weitergereicht.
|
||||||
|
|
||||||
|
Der Service ist **nicht öffentlich exponiert** und kommuniziert ausschließlich über das interne Docker-Netzwerk.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Update-Strategie
|
||||||
|
|
||||||
|
- monatliche Aktualisierung der ASN-Daten
|
||||||
|
- keine externen Requests während der Anfrageverarbeitung
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Healthcheck
|
||||||
|
|
||||||
|
- `GET /healthz` liefert `200`, wenn mindestens `MIN_ASN_COUNT` ASNs geladen sind
|
||||||
|
- Standard: `MIN_ASN_COUNT=10` (konfigurierbar via Env)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Einschränkungen
|
||||||
|
|
||||||
|
- Die Erkennung ist **heuristisch**
|
||||||
|
- Es gibt **keine Garantie**, dass jede Anfrage aus einem Hochschulnetz erkannt wird
|
||||||
|
- Die Information darf **nicht als Authentifizierungsmerkmal** verwendet werden
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Zusammenfassung
|
||||||
|
|
||||||
|
Dieses Projekt ermöglicht eine **performante, datenschutzfreundliche Erkennung von Hochschulnetzen**, um **Research-Angebote kontextabhängig bereitzustellen**, ohne Nutzer zu identifizieren oder externe Dienste zur Laufzeit zu kontaktieren.
|
||||||
27
asn-updater/.gitignore
vendored
Normal file
27
asn-updater/.gitignore
vendored
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
.DS_Store
|
||||||
|
# ---> Go
|
||||||
|
# If you prefer the allow list template instead of the deny list, see community template:
|
||||||
|
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
|
||||||
|
#
|
||||||
|
# Binaries for programs and plugins
|
||||||
|
*.exe
|
||||||
|
*.exe~
|
||||||
|
*.dll
|
||||||
|
*.so
|
||||||
|
*.dylib
|
||||||
|
|
||||||
|
# Test binary, built with `go test -c`
|
||||||
|
*.test
|
||||||
|
|
||||||
|
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||||
|
*.out
|
||||||
|
|
||||||
|
# Dependency directories (remove the comment below to include it)
|
||||||
|
# vendor/
|
||||||
|
|
||||||
|
# Go workspace file
|
||||||
|
go.work
|
||||||
|
go.work.sum
|
||||||
|
|
||||||
|
# env file
|
||||||
|
.env
|
||||||
19
asn-updater/Dockerfile
Normal file
19
asn-updater/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
FROM python:3.12-alpine

# ca-certificates/tzdata for HTTPS and correct timestamps; curl/tar are not
# used by update.py itself — presumably kept for manual debugging (confirm).
RUN apk add --no-cache ca-certificates tzdata curl tar && update-ca-certificates
WORKDIR /app

COPY update.py /app/update.py
COPY entrypoint.sh /app/entrypoint.sh
COPY healthcheck.sh /app/healthcheck.sh

# Pinned requests version for reproducible builds; scripts must be executable.
RUN pip install --no-cache-dir requests==2.32.3 \
 && chmod +x /app/entrypoint.sh /app/healthcheck.sh

# Data directory shared (as a volume) with the detection service.
ENV OUT_DIR=/data
VOLUME ["/data"]

ENTRYPOINT ["/app/entrypoint.sh"]

# Healthy once the updater has produced both data files (see healthcheck.sh).
HEALTHCHECK --interval=5m --timeout=10s --start-period=30s --retries=3 \
  CMD /app/healthcheck.sh
|
||||||
18
asn-updater/LICENSE
Normal file
18
asn-updater/LICENSE
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2026 mindboost
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
|
||||||
|
associated documentation files (the "Software"), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
|
||||||
|
following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial
|
||||||
|
portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
|
||||||
|
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
|
||||||
|
EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||||
|
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||||
|
USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
102
asn-updater/README.md
Normal file
102
asn-updater/README.md
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
# education-flagger
|
||||||
|
|
||||||
|
Forschung und Bildung sind die wichtigste Investition in die Zukunft, und der Zugang zu ihren Netzwerken verdient besondere Unterstützung.
|
||||||
|
|
||||||
|
Dieses Repo stellt einen **minimalen Microservice** bereit, um **Hochschul- und Forschungsnetzwerke (NRENs)** anhand der **Autonomous System Number (ASN)** zu erkennen. Ziel ist es, **Zugriff oder bevorzugte Behandlung** für Nutzer aus Research- und Education-Netzen zu ermöglichen, ohne personenbezogene Daten zu verarbeiten.
|
||||||
|
|
||||||
|
Das System dient ausschließlich der **Netzwerk-Klassifikation** und **ersetzt keine Authentifizierung**.
|
||||||
|
|
||||||
|
## Ziel
|
||||||
|
|
||||||
|
- Erkennen, ob eine Anfrage aus einem **Hochschul- oder Forschungsnetz** stammt
|
||||||
|
- Bereitstellung eines **Header-Hinweises** für nachgelagerte Services
|
||||||
|
- Grundlage für Entscheidungen wie:
|
||||||
|
- kostenfreie Research-Features
|
||||||
|
- angepasste UI-Hinweise
|
||||||
|
- alternative Rate-Limits
|
||||||
|
|
||||||
|
## Funktionsweise (Kurzfassung)
|
||||||
|
|
||||||
|
```
|
||||||
|
Client
|
||||||
|
-> Traefik
|
||||||
|
-> ForwardAuth
|
||||||
|
-> ASN Detection Service
|
||||||
|
-> Header wird ergänzt
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Die Client-IP wird ermittelt
|
||||||
|
2. Die zugehörige ASN wird lokal nachgeschlagen
|
||||||
|
3. Die ASN wird mit einer NREN-ASN-Liste verglichen
|
||||||
|
4. Das Ergebnis wird als HTTP-Header zurückgegeben
|
||||||
|
|
||||||
|
## Datenquellen
|
||||||
|
|
||||||
|
- **GeoLite2 ASN (MaxMind)**
|
||||||
|
- kostenlos
|
||||||
|
- lokal
|
||||||
|
- monatliche Aktualisierung
|
||||||
|
|
||||||
|
- **NREN-ASN-Liste**
|
||||||
|
- abgeleitet aus PeeringDB
|
||||||
|
- Kategorie: `Research and Education`
|
||||||
|
- monatliche Aktualisierung
|
||||||
|
|
||||||
|
## Bereitgestellte Header
|
||||||
|
|
||||||
|
| Header | Beschreibung |
|
||||||
|
|------|-------------|
|
||||||
|
| `X-ASN` | ASN der Client-IP |
|
||||||
|
| `X-ASN-ORG` | Organisation (optional) |
|
||||||
|
| `X-NREN` | `1` wenn ASN zu einem Hochschul-/Forschungsnetz gehört, sonst `0` |
|
||||||
|
|
||||||
|
## Integration
|
||||||
|
|
||||||
|
Der Service wird als **Traefik ForwardAuth Middleware** eingebunden.
|
||||||
|
Die Header werden über `authResponseHeaders` an die eigentliche Anwendung weitergereicht.
|
||||||
|
|
||||||
|
Der Service ist **nicht öffentlich exponiert** und kommuniziert ausschließlich über das interne Docker-Netzwerk.
|
||||||
|
|
||||||
|
Die dafür vorgesehenen Labels sind:
|
||||||
|
|
||||||
|
# Middleware Definition (ForwardAuth -> asn-header)
|
||||||
|
- "traefik.http.middlewares.asn-enrich.forwardauth.address=http://asn-header:8080/auth"
|
||||||
|
- "traefik.http.middlewares.asn-enrich.forwardauth.trustForwardHeader=true"
|
||||||
|
- "traefik.http.middlewares.asn-enrich.forwardauth.authResponseHeaders=X-ASN,X-ASN-ORG,X-NREN"
|
||||||
|
|
||||||
|
# Middleware am Router aktivieren
|
||||||
|
- "traefik.http.routers.web.middlewares=asn-enrich@docker"
|
||||||
|
|
||||||
|
Bitte füge diese zu dem Service hinzu, bei welchem man die gewünschten Header möchte.
|
||||||
|
|
||||||
|
## Run/Deploy (kurz)
|
||||||
|
|
||||||
|
1. `example.env` kopieren und als `.env` befüllen (mindestens `MAXMIND_LICENSE_KEY`).
|
||||||
|
2. Den Updater-Container starten und `OUT_DIR` als Volume mounten (z. B. `/data`).
|
||||||
|
3. Den ASN-Detection-Service so starten, dass er **denselben** `OUT_DIR` liest.
|
||||||
|
4. Traefik ForwardAuth aktivieren und `authResponseHeaders` durchreichen.
|
||||||
|
5. Nach dem ersten Update sollten `GeoLite2-ASN.mmdb` und `nren_asns.txt` im `OUT_DIR` liegen.
|
||||||
|
|
||||||
|
## example.env (kurz erklärt)
|
||||||
|
|
||||||
|
- `MAXMIND_LICENSE_KEY`: notwendig für den GeoLite2 Download.
|
||||||
|
- `PDB_API_KEY`: optional, reduziert PeeringDB Rate-Limits.
|
||||||
|
- `OUT_DIR`: gemeinsamer Datenpfad zwischen Updater und Detection-Service.
|
||||||
|
- `PDB_BASE`, `PDB_INFO_TYPE`, `PDB_LIMIT`: PeeringDB Filter.
|
||||||
|
- `HTTP_TIMEOUT`: Timeout pro HTTP-Request.
|
||||||
|
- `INTERVAL_SECONDS`: Update-Intervall (Standard 30 Tage).
|
||||||
|
|
||||||
|
## Update-Strategie
|
||||||
|
|
||||||
|
- monatliche Aktualisierung der ASN-Daten
|
||||||
|
- keine externen Requests während der Anfrageverarbeitung
|
||||||
|
|
||||||
|
## Einschränkungen
|
||||||
|
|
||||||
|
- Die Erkennung ist **heuristisch**
|
||||||
|
- Es gibt **keine Garantie**, dass jede Anfrage aus einem Hochschulnetz erkannt wird
|
||||||
|
- Die Information darf **nicht als Authentifizierungsmerkmal** verwendet werden
|
||||||
|
|
||||||
|
## Zusammenfassung
|
||||||
|
|
||||||
|
Dieses Projekt ermöglicht eine **performante, datenschutzfreundliche Erkennung von Hochschulnetzen**, um **Research-Angebote kontextabhängig bereitzustellen**, ohne Nutzer zu identifizieren oder externe Dienste zur Laufzeit zu kontaktieren.
|
||||||
17
asn-updater/entrypoint.sh
Normal file
17
asn-updater/entrypoint.sh
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
#!/bin/sh
# Run update.py in an endless loop: sleep INTERVAL_SECONDS after a successful
# run, RETRY_SECONDS after a failure (e.g. a MaxMind 429 rate limit), so a
# transient error never kills the container under `set -e`.
set -eu

INTERVAL_SECONDS="${INTERVAL_SECONDS:-2592000}"  # default: 30 days
RETRY_SECONDS="${RETRY_SECONDS:-3600}"           # default: 1 hour
echo "[start] updater interval=${INTERVAL_SECONDS}s out_dir=${OUT_DIR:-/data}"

while true; do
    echo "[run] update now"
    if python /app/update.py; then
        echo "[sleep] ${INTERVAL_SECONDS}s"
        sleep "${INTERVAL_SECONDS}"
    else
        echo "[warn] update failed; retry in ${RETRY_SECONDS}s"
        sleep "${RETRY_SECONDS}"
    fi
done
|
||||||
19
asn-updater/example.env
Normal file
19
asn-updater/example.env
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
# Required
|
||||||
|
MAXMIND_LICENSE_KEY=
|
||||||
|
|
||||||
|
# Optional (helps with rate limits)
|
||||||
|
PDB_API_KEY=
|
||||||
|
|
||||||
|
# Output data location shared with the detection service
|
||||||
|
OUT_DIR=/data
|
||||||
|
|
||||||
|
# PeeringDB settings
|
||||||
|
PDB_BASE=https://www.peeringdb.com
|
||||||
|
PDB_INFO_TYPE=Educational/Research
|
||||||
|
PDB_LIMIT=250
|
||||||
|
|
||||||
|
# HTTP settings
|
||||||
|
HTTP_TIMEOUT=30
|
||||||
|
|
||||||
|
# Update interval (seconds, default 30 days)
|
||||||
|
INTERVAL_SECONDS=2592000
|
||||||
23
asn-updater/healthcheck.sh
Normal file
23
asn-updater/healthcheck.sh
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
#!/bin/sh
# Container healthcheck for the updater: report healthy (exit 0) only when
# the data directory exists and both generated artifacts are present and
# non-empty. Unused PDB_BASE/PDB_INFO_TYPE defaults were removed — the
# script never referenced them.
set -eu

OUT_DIR="${OUT_DIR:-/data}"

if [ ! -d "${OUT_DIR}" ]; then
    echo "[health] OUT_DIR missing: ${OUT_DIR}" >&2
    exit 1
fi

# Both artifacts must exist and be non-empty (-s).
for f in GeoLite2-ASN.mmdb nren_asns.txt; do
    if [ ! -s "${OUT_DIR}/${f}" ]; then
        echo "[health] ${f} missing in ${OUT_DIR}" >&2
        exit 1
    fi
done

exit 0
|
||||||
133
asn-updater/update.py
Normal file
133
asn-updater/update.py
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
import os, time, json, tarfile, tempfile, shutil
|
||||||
|
import requests
|
||||||
|
|
||||||
|
# Shared data directory; the Go detection service reads the same path.
OUT_DIR = os.getenv("OUT_DIR", "/data")
# MaxMind GeoLite2 license key — required for the mmdb download.
LICENSE_KEY = os.getenv("MAXMIND_LICENSE_KEY", "").strip()
# Optional PeeringDB API key (raises the allowed request rate).
PDB_API_KEY = os.getenv("PDB_API_KEY", "").strip()
# PeeringDB API base URL.
PDB_BASE = os.getenv("PDB_BASE", "https://www.peeringdb.com")
# PeeringDB network category used to select NREN networks.
INFO_TYPE = os.getenv("PDB_INFO_TYPE", "Educational/Research")
# Per-request HTTP timeout in seconds.
TIMEOUT = int(os.getenv("HTTP_TIMEOUT", "30"))
# PeeringDB page size used for pagination.
LIMIT = int(os.getenv("PDB_LIMIT", "250"))
|
||||||
|
|
||||||
|
def atomic_replace(src_path: str, dst_path: str) -> None:
    """Install *src_path* at *dst_path* so readers never see a partial file.

    The data is first copied to a sibling ``.tmp`` file in the target
    directory and then renamed into place; ``os.replace`` is atomic on
    POSIX filesystems. The result is made world-readable (0644).
    """
    target_dir = os.path.dirname(dst_path)
    os.makedirs(target_dir, exist_ok=True)
    staging = dst_path + ".tmp"
    shutil.copyfile(src_path, staging)
    os.replace(staging, dst_path)
    os.chmod(dst_path, 0o644)
|
||||||
|
|
||||||
|
def download_maxmind_mmdb() -> None:
    """Download the GeoLite2-ASN database and install it into OUT_DIR.

    Uses MaxMind's official license_key + edition_id download endpoint.
    On HTTP 429 an already-present database is kept instead of failing.

    Raises:
        RuntimeError: if the license key is missing, the download is
            rate-limited with no existing database, or the archive does
            not contain the expected ``.mmdb`` file.
    """
    if not LICENSE_KEY:
        raise RuntimeError("MAXMIND_LICENSE_KEY missing")

    # Official GeoLite2 download mechanism via license_key + edition_id.
    # NOTE: the license key travels in the query string — never log `url`.
    url = (
        "https://download.maxmind.com/app/geoip_download"
        f"?edition_id=GeoLite2-ASN&license_key={LICENSE_KEY}&suffix=tar.gz"
    )

    with tempfile.TemporaryDirectory() as td:
        tgz = os.path.join(td, "GeoLite2-ASN.tar.gz")
        # Stream the archive to disk instead of buffering r.content in memory.
        with requests.get(url, timeout=TIMEOUT, stream=True) as r:
            if r.status_code == 429:
                existing = os.path.join(OUT_DIR, "GeoLite2-ASN.mmdb")
                if os.path.exists(existing):
                    print("[warn] MaxMind rate limited (429); keeping existing mmdb")
                    return
                raise RuntimeError("MaxMind rate limited (429) and no existing mmdb")
            r.raise_for_status()
            with open(tgz, "wb") as f:
                for chunk in r.iter_content(chunk_size=1 << 16):
                    f.write(chunk)

        mmdb_found = None
        with tarfile.open(tgz, "r:gz") as tar:
            for member in tar.getmembers():
                if member.isfile() and member.name.endswith("GeoLite2-ASN.mmdb"):
                    # Copy the member's bytes to a fixed path instead of
                    # tar.extract(member): extract() honors the member name,
                    # so a hostile "../" path could escape the temp dir.
                    src = tar.extractfile(member)
                    if src is None:
                        continue
                    mmdb_found = os.path.join(td, "GeoLite2-ASN.mmdb")
                    with src, open(mmdb_found, "wb") as out:
                        shutil.copyfileobj(src, out)
                    break

        if not mmdb_found or not os.path.exists(mmdb_found):
            raise RuntimeError("GeoLite2-ASN.mmdb not found in archive")

        atomic_replace(mmdb_found, os.path.join(OUT_DIR, "GeoLite2-ASN.mmdb"))
|
||||||
|
|
||||||
|
def pdb_headers():
    """Build request headers for PeeringDB.

    Adds an ``Authorization`` header only when an API key is configured;
    the key is optional but raises the allowed request rate.
    """
    headers = {"Accept": "application/json"}
    if PDB_API_KEY:
        headers["Authorization"] = f"Api-Key {PDB_API_KEY}"
    return headers
|
||||||
|
|
||||||
|
def fetch_pdb_page(skip: int, info_type: str):
    """Fetch one page of PeeringDB ``/api/net`` records.

    Args:
        skip: pagination offset into the result set.
        info_type: PeeringDB network category to filter on.

    Returns:
        The list under the response's ``data`` key (may be empty).

    Raises:
        requests.HTTPError: on a non-2xx response.
    """
    response = requests.get(
        f"{PDB_BASE}/api/net",
        params={
            "info_type": info_type,
            "limit": LIMIT,
            "skip": skip,
            # Only the fields the caller actually inspects.
            "fields": "asn,status,info_type",
        },
        headers=pdb_headers(),
        timeout=TIMEOUT,
    )
    response.raise_for_status()
    return response.json().get("data", [])
|
||||||
|
|
||||||
|
def update_nren_asns() -> str:
    """Fetch NREN ASNs from PeeringDB and write OUT_DIR/nren_asns.txt.

    Tries the configured INFO_TYPE first, then the known alternate labels,
    and returns the info_type that actually yielded results. When no ASNs
    are found at all, a previously written list is kept rather than being
    clobbered with an empty file (mirrors the 429 keep-existing behavior
    in download_maxmind_mmdb).
    """
    # Candidate labels; PeeringDB deployments have used both spellings.
    info_types = [INFO_TYPE]
    if INFO_TYPE != "Research and Education":
        info_types.append("Research and Education")
    if INFO_TYPE != "Educational/Research":
        info_types.append("Educational/Research")

    asns = set()
    used_info_type = INFO_TYPE
    for info_type in info_types:
        asns.clear()
        skip = 0
        while True:
            data = fetch_pdb_page(skip, info_type)
            for obj in data:
                # Only active ("ok") networks count.
                if obj.get("status") != "ok":
                    continue
                asn = obj.get("asn")
                if isinstance(asn, int) and asn > 0:
                    asns.add(asn)
            if len(data) < LIMIT:
                break  # short page => last page reached
            skip += LIMIT
            time.sleep(1.1)  # very conservative PeeringDB rate limiting
        if asns:
            used_info_type = info_type
            break

    out_txt = os.path.join(OUT_DIR, "nren_asns.txt")
    if not asns:
        print(f"[warn] no ASNs found for info_type(s)={info_types}")
        # Bug fix: do not overwrite a previously good list with an empty
        # file when PeeringDB returned nothing.
        if os.path.exists(out_txt):
            print("[warn] keeping existing nren_asns.txt")
            return used_info_type

    # Write atomically: temp file in the same directory, then rename.
    with tempfile.NamedTemporaryFile("w", delete=False, dir=OUT_DIR) as f:
        for a in sorted(asns):
            f.write(f"{a}\n")
        tmp_path = f.name
    os.replace(tmp_path, out_txt)
    os.chmod(out_txt, 0o644)
    return used_info_type
|
||||||
|
|
||||||
|
def write_meta(info_type: str):
    """Record when and with which PeeringDB settings the data was refreshed.

    Writes OUT_DIR/metadata.json (world-readable) with a unix timestamp,
    the info_type that produced results, and the PeeringDB base URL.
    """
    meta_path = os.path.join(OUT_DIR, "metadata.json")
    payload = {
        "updated_at_unix": int(time.time()),
        "info_type": info_type,
        "pdb_base": PDB_BASE,
    }
    with open(meta_path, "w") as fh:
        json.dump(payload, fh, indent=2)
    os.chmod(meta_path, 0o644)
|
||||||
|
|
||||||
|
def main():
    """Run one full refresh: GeoLite2 mmdb, NREN ASN list, then metadata."""
    os.makedirs(OUT_DIR, exist_ok=True)
    download_maxmind_mmdb()
    used_info_type = update_nren_asns()
    write_meta(used_info_type)
    print("[ok] updated mmdb + nren_asns")


if __name__ == "__main__":
    main()
|
||||||
44
docker-compose.yml
Normal file
44
docker-compose.yml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
services:
  # Go detection service: answers Traefik ForwardAuth requests using the
  # data files produced by asn-updater (mounted read-only).
  asn-header:
    build: .
    container_name: asn-header
    restart: unless-stopped
    env_file: .env
    environment:
      MMDB_PATH: /data/GeoLite2-ASN.mmdb
      ASN_LIST_PATH: /data/nren_asns.txt
      ADDR: ":8080"
    volumes:
      - asn_data:/data:ro
    healthcheck:
      # /healthz returns 200 once enough ASNs are loaded; wget is installed
      # in the runtime image for exactly this check.
      test: ["CMD", "wget", "-qO-", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 5s
      retries: 5
      start_period: 20s
    networks:
      - proxy

  # Python updater: periodically refreshes the GeoLite2 mmdb and the NREN
  # ASN list into the shared volume (read-write).
  asn-updater:
    build: ./asn-updater
    container_name: asn-updater
    restart: unless-stopped
    env_file: .env
    healthcheck:
      # NOTE(review): this disables the HEALTHCHECK baked into the
      # asn-updater image — confirm that is intended.
      disable: true
    environment:
      OUT_DIR: /data
      PDB_INFO_TYPE: "Research and Education"
      INTERVAL_SECONDS: "${UPDATE_INTERVAL_SECONDS}"
    volumes:
      - asn_data:/data
    networks:
      - proxy

networks:
  # Must already exist (created by the Traefik stack); name comes from .env.
  proxy:
    external: true
    name: ${PROXY_NETWORK}

volumes:
  # Shared data volume between updater (rw) and detection service (ro).
  asn_data:
|
||||||
13
entrypoint.sh
Normal file
13
entrypoint.sh
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
#!/bin/sh
# Updater loop: run update.py, then sleep for INTERVAL_SECONDS.
# A failed run (e.g. MaxMind 429 rate limit) no longer kills the loop under
# `set -e`; instead we retry after RETRY_SECONDS, as documented in .env.example.
set -eu

INTERVAL_SECONDS="${INTERVAL_SECONDS:-2592000}"
RETRY_SECONDS="${RETRY_SECONDS:-3600}"
echo "[start] updater interval=${INTERVAL_SECONDS}s out_dir=${OUT_DIR:-/data}"

while true; do
  echo "[run] update now"
  if python /app/update.py; then
    echo "[sleep] ${INTERVAL_SECONDS}s"
    sleep "${INTERVAL_SECONDS}"
  else
    echo "[warn] update failed; retrying in ${RETRY_SECONDS}s" >&2
    sleep "${RETRY_SECONDS}"
  fi
done
|
||||||
|
|
||||||
19
example.env
Normal file
19
example.env
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
# Required
|
||||||
|
MAXMIND_LICENSE_KEY=
|
||||||
|
|
||||||
|
# Optional (helps with rate limits)
|
||||||
|
PDB_API_KEY=
|
||||||
|
|
||||||
|
# Output data location shared with the detection service
|
||||||
|
OUT_DIR=/data
|
||||||
|
|
||||||
|
# PeeringDB settings
|
||||||
|
PDB_BASE=https://www.peeringdb.com
|
||||||
|
PDB_INFO_TYPE=Educational/Research
|
||||||
|
PDB_LIMIT=250
|
||||||
|
|
||||||
|
# HTTP settings
|
||||||
|
HTTP_TIMEOUT=30
|
||||||
|
|
||||||
|
# Update interval (seconds, default 30 days)
|
||||||
|
INTERVAL_SECONDS=2592000
|
||||||
13
go.mod
Normal file
13
go.mod
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
module asn-header-service
|
||||||
|
|
||||||
|
go 1.22
|
||||||
|
|
||||||
|
require github.com/oschwald/maxminddb-golang v1.13.1
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
|
github.com/stretchr/testify v1.9.0 // indirect
|
||||||
|
golang.org/x/sys v0.21.0 // indirect
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
|
)
|
||||||
12
go.sum
Normal file
12
go.sum
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
github.com/oschwald/maxminddb-golang v1.13.1 h1:G3wwjdN9JmIK2o/ermkHM+98oX5fS+k5MbwsmL4MRQE=
|
||||||
|
github.com/oschwald/maxminddb-golang v1.13.1/go.mod h1:K4pgV9N/GcK694KSTmVSDTODk4IsCNThNdTmnaBZ/F8=
|
||||||
|
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||||
|
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
|
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
|
||||||
|
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
49
healthcheck.sh
Normal file
49
healthcheck.sh
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
#!/bin/sh
# Health check for the updater: verifies config, output files, and that both
# upstream APIs (MaxMind download, PeeringDB) are reachable.
set -eu

OUT_DIR="${OUT_DIR:-/data}"
PDB_BASE="${PDB_BASE:-https://www.peeringdb.com}"
INFO_TYPE="${PDB_INFO_TYPE:-Educational/Research}"

if [ -z "${MAXMIND_LICENSE_KEY:-}" ]; then
  echo "[health] MAXMIND_LICENSE_KEY missing" >&2
  exit 1
fi

if [ ! -d "${OUT_DIR}" ]; then
  echo "[health] OUT_DIR missing: ${OUT_DIR}" >&2
  exit 1
fi

if [ ! -s "${OUT_DIR}/GeoLite2-ASN.mmdb" ]; then
  echo "[health] GeoLite2-ASN.mmdb missing in ${OUT_DIR}" >&2
  exit 1
fi

if [ ! -s "${OUT_DIR}/nren_asns.txt" ]; then
  echo "[health] nren_asns.txt missing in ${OUT_DIR}" >&2
  exit 1
fi

mm_url="https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-ASN&license_key=${MAXMIND_LICENSE_KEY}&suffix=tar.gz"
mm_code="$(curl -fsS -o /dev/null -w "%{http_code}" "${mm_url}" || true)"
if [ "${mm_code}" != "200" ]; then
  echo "[health] MaxMind download not accessible (status ${mm_code})" >&2
  exit 1
fi

pdb_url="${PDB_BASE}/api/net"
# Build the curl argument list with `set --` instead of an unquoted string:
# a flat $pdb_args word-splits on spaces, which broke any INFO_TYPE that
# contains whitespace (e.g. "Research and Education").
set -- --get \
  --data-urlencode "info_type=${INFO_TYPE}" \
  --data-urlencode "limit=1" \
  --data-urlencode "skip=0" \
  --data-urlencode "fields=asn,status,info_type" \
  -H "Accept: application/json"
if [ -n "${PDB_API_KEY:-}" ]; then
  set -- "$@" -H "Authorization: Api-Key ${PDB_API_KEY}"
fi
pdb_code="$(curl -fsS -o /dev/null -w "%{http_code}" "$@" "${pdb_url}" || true)"

# 429 (rate limited) still proves the API is reachable, so accept it.
if [ "${pdb_code}" != "200" ] && [ "${pdb_code}" != "429" ]; then
  echo "[health] PeeringDB not accessible (status ${pdb_code})" >&2
  exit 1
fi

exit 0
|
||||||
274
main.go
Normal file
274
main.go
Normal file
@@ -0,0 +1,274 @@
|
|||||||
|
package main

import (
	"bufio"
	"encoding/json"
	"log"
	"net"
	"net/http"
	"os"
	"strconv"
	"strings"
	"sync/atomic"
	"time"

	"github.com/oschwald/maxminddb-golang"
)

// asnRecord is the subset of a GeoLite2-ASN record decoded per IP lookup.
type asnRecord struct {
	ASN uint   `maxminddb:"autonomous_system_number"`
	Org string `maxminddb:"autonomous_system_organization"`
}

// lookupResponse is the JSON body returned by the /lookup endpoint.
type lookupResponse struct {
	Domain    string   `json:"domain"`
	NREN      bool     `json:"nren"`
	ASN       *uint    `json:"asn,omitempty"`
	ASNOrg    string   `json:"asn_org,omitempty"`
	IPs       []string `json:"ips"`
	MatchedIP string   `json:"matched_ip,omitempty"`
	Error     string   `json:"error,omitempty"`
}

// server bundles the shared lookup state used by all HTTP handlers.
type server struct {
	db         *maxminddb.Reader // GeoLite2-ASN database
	nrenASNs   map[uint]struct{} // set of NREN ASNs loaded from ASN_LIST_PATH
	ready      atomic.Bool       // gates handlers until startup data is loaded
	versionTag string            // reported via the X-Service header
	minASN     int               // minimum ASN count for /healthz to pass
	asnCount   int
}
|
||||||
|
|
||||||
|
func loadASNSet(path string) (map[uint]struct{}, error) {
|
||||||
|
f, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
set := make(map[uint]struct{}, 4096)
|
||||||
|
sc := bufio.NewScanner(f)
|
||||||
|
for sc.Scan() {
|
||||||
|
line := strings.TrimSpace(sc.Text())
|
||||||
|
if line == "" || strings.HasPrefix(line, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
v, err := strconv.ParseUint(line, 10, 32)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
set[uint(v)] = struct{}{}
|
||||||
|
}
|
||||||
|
return set, sc.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
func firstForwardedFor(r *http.Request) string {
|
||||||
|
xff := r.Header.Get("X-Forwarded-For")
|
||||||
|
if xff == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
parts := strings.Split(xff, ",")
|
||||||
|
if len(parts) == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return strings.TrimSpace(parts[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
func remoteIP(r *http.Request) string {
|
||||||
|
// Prefer XFF (because Traefik is proxy)
|
||||||
|
ip := firstForwardedFor(r)
|
||||||
|
if ip != "" {
|
||||||
|
return ip
|
||||||
|
}
|
||||||
|
host, _, err := net.SplitHostPort(r.RemoteAddr)
|
||||||
|
if err == nil {
|
||||||
|
return host
|
||||||
|
}
|
||||||
|
return r.RemoteAddr
|
||||||
|
}
|
||||||
|
|
||||||
|
func writeJSON(w http.ResponseWriter, status int, payload any) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(status)
|
||||||
|
_ = json.NewEncoder(w).Encode(payload)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *server) authHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if !s.ready.Load() {
|
||||||
|
w.WriteHeader(http.StatusServiceUnavailable)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ipStr := remoteIP(r)
|
||||||
|
parsed := net.ParseIP(ipStr)
|
||||||
|
if parsed == nil {
|
||||||
|
// Always 200: we enrich, not block
|
||||||
|
w.Header().Set("X-NREN", "0")
|
||||||
|
w.WriteHeader(http.StatusOK)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var rec asnRecord
|
||||||
|
if err := s.db.Lookup(parsed, &rec); err != nil || rec.ASN == 0 {
|
||||||
|
w.Header().Set("X-NREN", "0")
|
||||||
|
w.WriteHeader(http.StatusOK)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("X-ASN", strconv.FormatUint(uint64(rec.ASN), 10))
|
||||||
|
if rec.Org != "" {
|
||||||
|
// optional: keep it short; some org strings can be long
|
||||||
|
w.Header().Set("X-ASN-ORG", rec.Org)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, ok := s.nrenASNs[rec.ASN]
|
||||||
|
if ok {
|
||||||
|
w.Header().Set("X-NREN", "1")
|
||||||
|
} else {
|
||||||
|
w.Header().Set("X-NREN", "0")
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Cache-Control", "no-store")
|
||||||
|
w.Header().Set("X-Service", s.versionTag)
|
||||||
|
w.WriteHeader(http.StatusOK)
|
||||||
|
}
|
||||||
|
|
||||||
|
// lookupHandler answers GET /lookup?domain=...: resolve the domain, look up
// each IP's ASN, and report whether any ASN is on the NREN list. Resolution
// failures return 200 with an error field (unknown is treated as "not NREN");
// only a missing domain parameter is a 400.
func (s *server) lookupHandler(w http.ResponseWriter, r *http.Request) {
	if !s.ready.Load() {
		writeJSON(w, http.StatusServiceUnavailable, lookupResponse{
			NREN:  false,
			Error: "service not ready",
		})
		return
	}

	domain := strings.TrimSpace(r.URL.Query().Get("domain"))
	if domain == "" {
		writeJSON(w, http.StatusBadRequest, lookupResponse{
			NREN:  false,
			Error: "missing domain",
		})
		return
	}

	ips, err := net.LookupIP(domain)
	if err != nil || len(ips) == 0 {
		writeJSON(w, http.StatusOK, lookupResponse{
			Domain: domain,
			NREN:   false,
			Error:  "domain lookup failed",
		})
		return
	}

	resp := lookupResponse{
		Domain: domain,
		NREN:   false,
		IPs:    make([]string, 0, len(ips)),
	}

	// Remember the first resolvable ASN so a non-NREN response still carries
	// ASN information for debugging.
	var firstASN *uint
	var firstOrg string

	for _, ip := range ips {
		ipStr := ip.String()
		resp.IPs = append(resp.IPs, ipStr)

		var rec asnRecord
		if err := s.db.Lookup(ip, &rec); err != nil || rec.ASN == 0 {
			continue
		}

		if firstASN == nil {
			firstASN = new(uint)
			*firstASN = rec.ASN
			firstOrg = rec.Org
		}

		// First NREN match wins and returns immediately; note resp.IPs then
		// only contains the IPs iterated so far.
		if _, ok := s.nrenASNs[rec.ASN]; ok {
			asn := rec.ASN
			resp.NREN = true
			resp.ASN = &asn
			resp.ASNOrg = rec.Org
			resp.MatchedIP = ipStr
			writeJSON(w, http.StatusOK, resp)
			return
		}
	}

	if firstASN != nil {
		resp.ASN = firstASN
		resp.ASNOrg = firstOrg
	}

	writeJSON(w, http.StatusOK, resp)
}
|
||||||
|
|
||||||
|
// main wires configuration from the environment, loads the mmdb and the NREN
// ASN set once at startup, and serves /auth, /lookup and /healthz.
func main() {
	mmdbPath := getenv("MMDB_PATH", "/data/GeoLite2-ASN.mmdb")
	asnListPath := getenv("ASN_LIST_PATH", "/data/nren_asns.txt")
	addr := getenv("ADDR", ":8080")
	version := getenv("VERSION_TAG", "asn-header-service")
	minASN := getenvInt("MIN_ASN_COUNT", 10)

	db, err := maxminddb.Open(mmdbPath)
	if err != nil {
		log.Fatalf("failed to open mmdb: %v", err)
	}
	defer db.Close()

	set, err := loadASNSet(asnListPath)
	if err != nil {
		log.Fatalf("failed to load asn list: %v", err)
	}
	asnCount := len(set)

	s := &server{
		db:         db,
		nrenASNs:   set,
		versionTag: version,
		minASN:     minASN,
		asnCount:   asnCount,
	}
	s.ready.Store(true)

	mux := http.NewServeMux()
	mux.HandleFunc("/auth", s.authHandler)
	mux.HandleFunc("/lookup", s.lookupHandler)
	mux.HandleFunc("/healthz", func(w http.ResponseWriter, _ *http.Request) {
		// Report unhealthy when the ASN list looks truncated or empty
		// (e.g. after a failed updater run).
		if s.asnCount < s.minASN {
			w.WriteHeader(http.StatusServiceUnavailable)
			return
		}
		w.WriteHeader(http.StatusOK)
	})

	srv := &http.Server{
		Addr:              addr,
		Handler:           mux,
		ReadHeaderTimeout: 2 * time.Second,
	}

	log.Printf("listening on %s (asn_count=%d, min_asn=%d)", addr, asnCount, minASN)
	log.Fatal(srv.ListenAndServe())
}
|
||||||
|
|
||||||
|
func getenv(k, def string) string {
|
||||||
|
v := strings.TrimSpace(os.Getenv(k))
|
||||||
|
if v == "" {
|
||||||
|
return def
|
||||||
|
}
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
|
||||||
|
func getenvInt(k string, def int) int {
|
||||||
|
v := strings.TrimSpace(os.Getenv(k))
|
||||||
|
if v == "" {
|
||||||
|
return def
|
||||||
|
}
|
||||||
|
parsed, err := strconv.Atoi(v)
|
||||||
|
if err != nil {
|
||||||
|
return def
|
||||||
|
}
|
||||||
|
return parsed
|
||||||
|
}
|
||||||
46
main_test.go
Normal file
46
main_test.go
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
|
"sync/atomic"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestLookupMissingDomain(t *testing.T) {
|
||||||
|
s := &server{
|
||||||
|
nrenASNs: make(map[uint]struct{}),
|
||||||
|
}
|
||||||
|
s.ready.Store(true)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/lookup", nil)
|
||||||
|
rr := httptest.NewRecorder()
|
||||||
|
|
||||||
|
s.lookupHandler(rr, req)
|
||||||
|
|
||||||
|
if rr.Code != http.StatusBadRequest {
|
||||||
|
t.Fatalf("expected 400, got %d", rr.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.Contains(rr.Body.String(), "missing domain") {
|
||||||
|
t.Fatalf("expected error message in response")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLookupServiceNotReady(t *testing.T) {
|
||||||
|
s := &server{
|
||||||
|
nrenASNs: make(map[uint]struct{}),
|
||||||
|
}
|
||||||
|
s.ready = atomic.Bool{}
|
||||||
|
s.ready.Store(false)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/lookup?domain=example.com", nil)
|
||||||
|
rr := httptest.NewRecorder()
|
||||||
|
|
||||||
|
s.lookupHandler(rr, req)
|
||||||
|
|
||||||
|
if rr.Code != http.StatusServiceUnavailable {
|
||||||
|
t.Fatalf("expected 503, got %d", rr.Code)
|
||||||
|
}
|
||||||
|
}
|
||||||
107
update.py
Normal file
107
update.py
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
import os, time, json, tarfile, tempfile, shutil
|
||||||
|
import requests
|
||||||
|
|
||||||
|
OUT_DIR = os.getenv("OUT_DIR", "/data")
|
||||||
|
LICENSE_KEY = os.getenv("MAXMIND_LICENSE_KEY", "").strip()
|
||||||
|
PDB_API_KEY = os.getenv("PDB_API_KEY", "").strip()
|
||||||
|
PDB_BASE = os.getenv("PDB_BASE", "https://www.peeringdb.com")
|
||||||
|
INFO_TYPE = os.getenv("PDB_INFO_TYPE", "Educational/Research")
|
||||||
|
TIMEOUT = int(os.getenv("HTTP_TIMEOUT", "30"))
|
||||||
|
LIMIT = int(os.getenv("PDB_LIMIT", "250"))
|
||||||
|
|
||||||
|
def atomic_replace(src_path: str, dst_path: str) -> None:
    """Install src_path at dst_path atomically.

    Copies into a sibling ``.tmp`` file first, then renames over the target,
    so readers never observe a partially written file.
    """
    target_dir = os.path.dirname(dst_path)
    os.makedirs(target_dir, exist_ok=True)
    staging = dst_path + ".tmp"
    shutil.copyfile(src_path, staging)
    os.replace(staging, dst_path)
|
||||||
|
|
||||||
|
def download_maxmind_mmdb() -> None:
    """Download the GeoLite2-ASN database and atomically install it in OUT_DIR.

    Raises RuntimeError when the license key is unset or the archive does not
    contain the expected .mmdb; requests/HTTP errors propagate unchanged.
    """
    if not LICENSE_KEY:
        raise RuntimeError("MAXMIND_LICENSE_KEY missing")

    # Official GeoLite2 download mechanism via license_key + edition_id.
    url = (
        "https://download.maxmind.com/app/geoip_download"
        f"?edition_id=GeoLite2-ASN&license_key={LICENSE_KEY}&suffix=tar.gz"
    )

    with tempfile.TemporaryDirectory() as td:
        tgz = os.path.join(td, "GeoLite2-ASN.tar.gz")
        r = requests.get(url, timeout=TIMEOUT)
        r.raise_for_status()
        # Archive is small enough to buffer fully in memory before writing.
        with open(tgz, "wb") as f:
            f.write(r.content)

        # The tarball nests the mmdb inside a dated directory; extract only it.
        mmdb_found = None
        with tarfile.open(tgz, "r:gz") as tar:
            for member in tar.getmembers():
                if member.name.endswith("GeoLite2-ASN.mmdb"):
                    tar.extract(member, path=td)
                    mmdb_found = os.path.join(td, member.name)
                    break

        if not mmdb_found or not os.path.exists(mmdb_found):
            raise RuntimeError("GeoLite2-ASN.mmdb not found in archive")

        atomic_replace(mmdb_found, os.path.join(OUT_DIR, "GeoLite2-ASN.mmdb"))
|
||||||
|
|
||||||
|
def pdb_headers():
    """Build request headers for PeeringDB; attaches the API key when set."""
    headers = {"Accept": "application/json"}
    if PDB_API_KEY:
        # The PeeringDB API key is optional but raises the rate limits.
        headers["Authorization"] = f"Api-Key {PDB_API_KEY}"
    return headers
|
||||||
|
|
||||||
|
def fetch_pdb_page(skip: int):
    """Fetch one page of PeeringDB /api/net entries filtered by INFO_TYPE.

    Returns the raw ``data`` list from the JSON payload (empty when absent).
    """
    response = requests.get(
        f"{PDB_BASE}/api/net",
        params={
            "info_type": INFO_TYPE,
            "limit": LIMIT,
            "skip": skip,
            "fields": "asn,status,info_type",
        },
        headers=pdb_headers(),
        timeout=TIMEOUT,
    )
    response.raise_for_status()
    return response.json().get("data", [])
|
||||||
|
|
||||||
|
def update_nren_asns() -> None:
    """Refresh OUT_DIR/nren_asns.txt with all PeeringDB ASNs matching INFO_TYPE.

    Pages through /api/net until a short page is returned, collects valid ASNs
    of networks with status "ok", then writes the sorted list atomically.
    """
    asns = set()
    skip = 0
    while True:
        data = fetch_pdb_page(skip)
        for obj in data:
            if obj.get("status") != "ok":
                continue
            asn = obj.get("asn")
            if isinstance(asn, int) and asn > 0:
                asns.add(asn)
        if len(data) < LIMIT:
            break
        skip += LIMIT
        time.sleep(1.1)  # very conservative PeeringDB pacing

    out_txt = os.path.join(OUT_DIR, "nren_asns.txt")
    # Stage the temp file inside OUT_DIR (not the default /tmp): os.replace()
    # requires source and destination on the same filesystem, and OUT_DIR is
    # typically a mounted volume — a /tmp temp file fails with EXDEV.
    with tempfile.NamedTemporaryFile("w", dir=OUT_DIR, delete=False) as f:
        for a in sorted(asns):
            f.write(f"{a}\n")
        tmp_path = f.name
    os.replace(tmp_path, out_txt)
|
||||||
|
|
||||||
|
def write_meta():
    """Record when and with which settings the data set was produced."""
    meta_path = os.path.join(OUT_DIR, "metadata.json")
    meta = {
        "updated_at_unix": int(time.time()),
        "info_type": INFO_TYPE,
        "pdb_base": PDB_BASE,
    }
    with open(meta_path, "w") as f:
        json.dump(meta, f, indent=2)
|
||||||
|
|
||||||
|
def main():
    """Run one full update: fetch the mmdb, refresh the ASN list, write metadata."""
    os.makedirs(OUT_DIR, exist_ok=True)
    download_maxmind_mmdb()
    update_nren_asns()
    write_meta()
    print("[ok] updated mmdb + nren_asns")


if __name__ == "__main__":
    main()
|
||||||
Reference in New Issue
Block a user