Compare commits
15 Commits
feat/domai
...
ca80c3e6c7
| Author | SHA1 | Date | |
|---|---|---|---|
| ca80c3e6c7 | |||
| dc0e31060e | |||
| 58e1948bc2 | |||
| b2f7d9527f | |||
| 4d945aabab | |||
| 0072307bec | |||
| d36a1e7655 | |||
| e3ed622ade | |||
| 1e7013269e | |||
| 45fd4454fa | |||
| 5870ab952f | |||
| 54615ec19a | |||
| e255600a93 | |||
| 8d012b8085 | |||
| b00aeb5580 |
@@ -1,3 +1,6 @@
|
||||
# image tag to deploy from Gitea packages
|
||||
IMAGE_TAG=sha256-your-commit-hash
|
||||
|
||||
# MaxMind (create a free GeoLite2 license key in your MaxMind account)
|
||||
MAXMIND_LICENSE_KEY=your_maxmind_license_key
|
||||
|
||||
|
||||
244
Jenkinsfile
vendored
Normal file
244
Jenkinsfile
vendored
Normal file
@@ -0,0 +1,244 @@
|
||||
// CI pipeline: builds the education-flagger "header" and "updater" Docker
// images and pushes them to the Gitea container registry.
pipeline {
    // Manual build knobs; all optional.
    parameters {
        string(
            name: 'IMAGE_VERSION',
            defaultValue: '',
            description: 'Optional override for the Docker image tag (e.g., stable, release, 0.0.2). Leave empty to use the commit hash.'
        )
        string(
            name: 'GIT_REF',
            defaultValue: '',
            description: 'Branch or tag to build. Leave empty to use Jenkins-provided branch or default to main.'
        )
        booleanParam(
            name: 'CLEAN_BUILD',
            defaultValue: false,
            description: 'Run docker build --pull --no-cache when true.'
        )
    }

    agent any

    environment {
        // Source repository and the image names as published in Gitea.
        GIT_URL = 'https://gitea.mindboost.team/mindboost/education-flagger.git'
        HEADER_IMAGE_NAME = 'mindboost/education-flagger-header'
        UPDATER_IMAGE_NAME = 'mindboost/education-flagger-updater'
        // Registry-less names used for the local build before tagging/pushing.
        LOCAL_HEADER_IMAGE_NAME = 'education_flagger_header_image'
        LOCAL_UPDATER_IMAGE_NAME = 'education_flagger_updater_image'
        // Jenkins credential IDs (lookup keys, not secrets themselves).
        GIT_CREDENTIALS_ID = 'b5f383be-8c74-40f9-b7e1-3a9c5856df0e'
        REGISTRY_CREDENTIALS_ID = '62d300cc-d8c6-437a-8699-c58b9e1edcb0'
        REGISTRY_SCHEME = 'https'
        REGISTRY_HOST = 'gitea.mindboost.team'
    }

    stages {
        // Resolve which ref to build: explicit GIT_REF parameter first, then
        // whatever branch Jenkins detected, then 'main' as a last resort.
        stage('Checkout') {
            steps {
                script {
                    def selectedRef = params?.GIT_REF?.trim()
                    if (!selectedRef) {
                        selectedRef = env.CHANGE_BRANCH ?: env.BRANCH_NAME ?: env.GIT_BRANCH
                    }
                    if (!selectedRef) {
                        selectedRef = 'main'
                        echo "No GIT_REF supplied. Falling back to 'main'."
                    }

                    // 'origin/foo' -> 'foo'; fully qualified 'refs/...' pass through.
                    def normalizedRef = selectedRef.replaceFirst('^origin/', '')
                    def branchSpec = normalizedRef.startsWith('refs/') ? normalizedRef : "*/${normalizedRef}"
                    echo "Checking out '${branchSpec}' from ${env.GIT_URL}"

                    checkout([
                        $class: 'GitSCM',
                        branches: [[name: branchSpec]],
                        userRemoteConfigs: [[
                            url: env.GIT_URL,
                            credentialsId: env.GIT_CREDENTIALS_ID
                        ]]
                    ])
                }
            }
        }

        // Purely diagnostic: surfaces the workspace state in the build log.
        stage('Check Repository') {
            steps {
                script {
                    sh 'pwd'
                    sh 'ls -la'
                    sh 'git status'
                }
            }
        }

        // Compute env.IMAGE_TAG: the IMAGE_VERSION parameter when provided,
        // otherwise 'sha256-<full commit hash>' (matching the deploy .env hint).
        stage('Determine Version') {
            steps {
                script {
                    def imageVersion = ''

                    if (params?.IMAGE_VERSION) {
                        imageVersion = params.IMAGE_VERSION.trim()
                        echo "Using build parameter IMAGE_VERSION=${imageVersion}"
                    }

                    if (!imageVersion) {
                        def longSha = sh(
                            script: 'git rev-parse HEAD',
                            returnStdout: true
                        ).trim()
                        imageVersion = "sha256-${longSha}"
                        echo "No IMAGE_VERSION provided. Falling back to commit hash: ${imageVersion}"
                    }

                    // Docker tags only allow [A-Za-z0-9_.-]; replace anything else.
                    def sanitized = imageVersion.replaceAll('[^A-Za-z0-9_.-]', '-')
                    if (sanitized != imageVersion) {
                        echo "Sanitized version value from '${imageVersion}' to '${sanitized}' for Docker tag compatibility."
                    }

                    env.IMAGE_TAG = sanitized
                    echo "Resolved image tag: ${env.IMAGE_TAG}"
                }
            }
        }

        // Informational only; tagging uses env.IMAGE_TAG, not the short hash.
        stage('Get Commit Hash') {
            steps {
                script {
                    env.GIT_COMMIT_SHORT = sh(
                        script: 'git rev-parse --short HEAD',
                        returnStdout: true
                    ).trim()
                    echo "Commit Hash: ${env.GIT_COMMIT_SHORT}"
                }
            }
        }

        // Short-circuit: when both local images for this tag already exist
        // (and CLEAN_BUILD is off), mark the build SUCCESS early. The later
        // stages gate on `currentBuild.result == null` and therefore skip.
        stage('Check Docker Images with the same tag') {
            steps {
                script {
                    def cleanBuild = params?.CLEAN_BUILD == true
                    // `|| true` keeps the probe from failing the build when
                    // the image is absent; an empty string means "not found".
                    def headerImageExists = sh(
                        script: "docker images -q ${env.LOCAL_HEADER_IMAGE_NAME}:${env.IMAGE_TAG} || true",
                        returnStdout: true
                    ).trim()
                    def updaterImageExists = sh(
                        script: "docker images -q ${env.LOCAL_UPDATER_IMAGE_NAME}:${env.IMAGE_TAG} || true",
                        returnStdout: true
                    ).trim()

                    if (cleanBuild) {
                        echo "CLEAN_BUILD=true: ignoring existing local images for tag ${env.IMAGE_TAG}."
                    } else if (headerImageExists && updaterImageExists) {
                        echo "Both Docker images with tag ${env.IMAGE_TAG} already exist locally. Skipping build."
                        currentBuild.result = 'SUCCESS'
                        return
                    } else {
                        echo 'At least one local Docker image is missing. Building fresh images.'
                    }
                }
            }
        }

        stage('Build Docker Images') {
            // Runs only when the previous stage did not short-circuit.
            when {
                expression { currentBuild.result == null }
            }
            steps {
                script {
                    def cleanBuild = params?.CLEAN_BUILD == true
                    def buildFlags = cleanBuild ? '--pull --no-cache ' : ''
                    // Header image from the repo root, updater from ./asn-updater.
                    sh "docker build ${buildFlags}-t ${env.LOCAL_HEADER_IMAGE_NAME}:${env.IMAGE_TAG} ."
                    sh "docker build ${buildFlags}-t ${env.LOCAL_UPDATER_IMAGE_NAME}:${env.IMAGE_TAG} ./asn-updater"
                }
            }
        }

        stage('Push Docker Images') {
            when {
                expression { currentBuild.result == null }
            }
            steps {
                script {
                    withCredentials([
                        usernamePassword(
                            credentialsId: env.REGISTRY_CREDENTIALS_ID,
                            usernameVariable: 'REGISTRY_USER',
                            passwordVariable: 'REGISTRY_PASS'
                        )
                    ]) {
                        def registryAuthority = env.REGISTRY_HOST
                        def registryEndpoint = "${env.REGISTRY_SCHEME}://${registryAuthority}"
                        def remoteHeaderImageTag = "${registryAuthority}/${env.HEADER_IMAGE_NAME}:${env.IMAGE_TAG}"
                        def remoteUpdaterImageTag = "${registryAuthority}/${env.UPDATER_IMAGE_NAME}:${env.IMAGE_TAG}"

                        // Everything the shell needs is passed through the
                        // environment so the single-quoted heredoc below stays
                        // free of Groovy interpolation (no secret leakage into
                        // the build log via interpolated sh steps).
                        withEnv([
                            "REGISTRY_AUTHORITY=${registryAuthority}",
                            "REGISTRY_ENDPOINT=${registryEndpoint}",
                            "REMOTE_HEADER_IMAGE_TAG=${remoteHeaderImageTag}",
                            "REMOTE_UPDATER_IMAGE_TAG=${remoteUpdaterImageTag}"
                        ]) {
                            sh '''
set -eux
if [ -z "${IMAGE_TAG:-}" ]; then
echo "IMAGE_TAG is empty. Did the Determine Version stage run?" >&2
exit 1
fi
if [ -z "${REGISTRY_USER:-}" ]; then
echo "REGISTRY_USER is empty. Check Jenkins credentials mapping." >&2
exit 1
fi
if [ -z "${REGISTRY_PASS:-}" ]; then
echo "REGISTRY_PASS is empty. Check Jenkins credentials mapping." >&2
exit 1
fi
if [ -z "${REGISTRY_AUTHORITY:-}" ]; then
echo "REGISTRY_AUTHORITY is empty. Registry authority not resolved." >&2
exit 1
fi
if [ -z "${REGISTRY_ENDPOINT:-}" ]; then
echo "REGISTRY_ENDPOINT is empty. Registry endpoint not resolved." >&2
exit 1
fi
if [ -z "${REMOTE_HEADER_IMAGE_TAG:-}" ]; then
echo "REMOTE_HEADER_IMAGE_TAG is empty. Derived header Docker tag missing." >&2
exit 1
fi
if [ -z "${REMOTE_UPDATER_IMAGE_TAG:-}" ]; then
echo "REMOTE_UPDATER_IMAGE_TAG is empty. Derived updater Docker tag missing." >&2
exit 1
fi

docker --version
docker info
docker image inspect "$LOCAL_HEADER_IMAGE_NAME:$IMAGE_TAG" >/dev/null
docker image inspect "$LOCAL_UPDATER_IMAGE_NAME:$IMAGE_TAG" >/dev/null

echo "Logging into Docker registry $REGISTRY_ENDPOINT as $REGISTRY_USER"
echo "$REGISTRY_PASS" | docker login "$REGISTRY_ENDPOINT" --username "$REGISTRY_USER" --password-stdin
docker tag "$LOCAL_HEADER_IMAGE_NAME:$IMAGE_TAG" "$REMOTE_HEADER_IMAGE_TAG"
docker tag "$LOCAL_UPDATER_IMAGE_NAME:$IMAGE_TAG" "$REMOTE_UPDATER_IMAGE_TAG"
echo "Pushing Docker image $REMOTE_HEADER_IMAGE_TAG to $REGISTRY_AUTHORITY"
docker push "$REMOTE_HEADER_IMAGE_TAG"
echo "Pushing Docker image $REMOTE_UPDATER_IMAGE_TAG to $REGISTRY_AUTHORITY"
docker push "$REMOTE_UPDATER_IMAGE_TAG"
docker logout "$REGISTRY_ENDPOINT"
'''
                        }
                    }
                }
            }
        }

        stage('Cleanup Docker Images') {
            // NOTE(review): this also skips cleanup on the early-exit path,
            // and prune affects images from other jobs on this agent —
            // confirm both are intended.
            when {
                expression { currentBuild.result == null }
            }
            steps {
                sh '''
set -eux
docker image prune -f
docker builder prune -f
'''
            }
        }
    }
}
|
||||
18
LICENSE
Normal file
18
LICENSE
Normal file
@@ -0,0 +1,18 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2026 mindboost
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
|
||||
associated documentation files (the "Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
|
||||
following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial
|
||||
portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
|
||||
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
|
||||
EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
45
docker-compose.deploy.yml
Normal file
45
docker-compose.deploy.yml
Normal file
@@ -0,0 +1,45 @@
|
||||
services:
  # HTTP service answering "is this ASN an education/research network?".
  asn-header:
    image: gitea.mindboost.team/mindboost/education-flagger-header:${IMAGE_TAG}
    container_name: staging-asn-header
    restart: unless-stopped
    env_file: .env
    environment:
      # Data files are produced by the asn-updater service (shared volume).
      MMDB_PATH: /data/GeoLite2-ASN.mmdb
      ASN_LIST_PATH: /data/nren_asns.txt
      ADDR: ":8080"
    volumes:
      # Read-only: only the updater writes to the shared volume.
      - asn_data:/data:ro
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 5s
      retries: 5
      # Grace period so the service can load the data files before probing.
      start_period: 20s
    networks:
      - proxy

  # Background job that periodically refreshes the GeoLite2 DB and ASN list.
  asn-updater:
    image: gitea.mindboost.team/mindboost/education-flagger-updater:${IMAGE_TAG}
    container_name: staging-asn-updater
    restart: unless-stopped
    env_file: .env
    healthcheck:
      # Disable any healthcheck baked into the image; the updater is a
      # long-sleeping loop and would look unhealthy between runs.
      disable: true
    environment:
      OUT_DIR: /data
      PDB_INFO_TYPE: "${PDB_INFO_TYPE}"
      # Deploy-level variable is remapped to the name the container reads.
      INTERVAL_SECONDS: "${UPDATE_INTERVAL_SECONDS}"
    volumes:
      - asn_data:/data
    networks:
      - proxy

networks:
  # Pre-existing reverse-proxy network; name comes from the .env file.
  proxy:
    external: true
    name: ${PROXY_NETWORK}

volumes:
  asn_data:
    name: staging_asn_data
|
||||
13
entrypoint.sh
Normal file
13
entrypoint.sh
Normal file
@@ -0,0 +1,13 @@
|
||||
#!/bin/sh
# Updater entrypoint: run update.py forever, pausing INTERVAL_SECONDS
# (default 30 days) between runs.
set -eu

INTERVAL_SECONDS="${INTERVAL_SECONDS:-2592000}"
echo "[start] updater interval=${INTERVAL_SECONDS}s out_dir=${OUT_DIR:-/data}"

while true; do
    echo "[run] update now"
    # Tolerate transient failures (network hiccups, rate limits): under
    # `set -e` a failing update would kill this loop, and Docker's restart
    # policy would relaunch the container immediately — retrying without
    # honoring the interval. Log the failure and retry on schedule instead.
    if ! python /app/update.py; then
        echo "[warn] update failed; retrying after the normal interval" >&2
    fi
    echo "[sleep] ${INTERVAL_SECONDS}s"
    sleep "${INTERVAL_SECONDS}"
done
|
||||
|
||||
19
example.env
Normal file
19
example.env
Normal file
@@ -0,0 +1,19 @@
|
||||
# Required
|
||||
MAXMIND_LICENSE_KEY=
|
||||
|
||||
# Optional (helps with rate limits)
|
||||
PDB_API_KEY=
|
||||
|
||||
# Output data location shared with the detection service
|
||||
OUT_DIR=/data
|
||||
|
||||
# PeeringDB settings
|
||||
PDB_BASE=https://www.peeringdb.com
|
||||
PDB_INFO_TYPE=Educational/Research
|
||||
PDB_LIMIT=250
|
||||
|
||||
# HTTP settings
|
||||
HTTP_TIMEOUT=30
|
||||
|
||||
# Update interval (seconds, default 30 days)
|
||||
INTERVAL_SECONDS=2592000
|
||||
49
healthcheck.sh
Normal file
49
healthcheck.sh
Normal file
@@ -0,0 +1,49 @@
|
||||
#!/bin/sh
# Healthcheck for the updater image: verifies configuration, the presence of
# the generated data files, and reachability of both upstream services
# (MaxMind downloads and the PeeringDB API).
set -eu

OUT_DIR="${OUT_DIR:-/data}"
PDB_BASE="${PDB_BASE:-https://www.peeringdb.com}"
INFO_TYPE="${PDB_INFO_TYPE:-Educational/Research}"

# --- configuration checks ---------------------------------------------------
if [ -z "${MAXMIND_LICENSE_KEY:-}" ]; then
    echo "[health] MAXMIND_LICENSE_KEY missing" >&2
    exit 1
fi

# --- data-file checks (-s: exists AND non-empty) ----------------------------
if [ ! -d "${OUT_DIR}" ]; then
    echo "[health] OUT_DIR missing: ${OUT_DIR}" >&2
    exit 1
fi

if [ ! -s "${OUT_DIR}/GeoLite2-ASN.mmdb" ]; then
    echo "[health] GeoLite2-ASN.mmdb missing in ${OUT_DIR}" >&2
    exit 1
fi

if [ ! -s "${OUT_DIR}/nren_asns.txt" ]; then
    echo "[health] nren_asns.txt missing in ${OUT_DIR}" >&2
    exit 1
fi

# --- upstream reachability --------------------------------------------------
# `|| true` keeps `set -e` from aborting; the HTTP status printed by -w is
# compared explicitly instead.
mm_url="https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-ASN&license_key=${MAXMIND_LICENSE_KEY}&suffix=tar.gz"
mm_code="$(curl -fsS -o /dev/null -w "%{http_code}" "${mm_url}" || true)"
if [ "${mm_code}" != "200" ]; then
    echo "[health] MaxMind download not accessible (status ${mm_code})" >&2
    exit 1
fi

# Minimal PeeringDB query (limit=1) mirroring the parameters update.py uses.
# ${pdb_args} is intentionally left unquoted so it word-splits into separate
# curl arguments. NOTE(review): this assumes INFO_TYPE never contains spaces
# ("Educational/Research" does not) — confirm if the value ever changes.
pdb_code="000"
pdb_url="${PDB_BASE}/api/net"
pdb_args="--get --data-urlencode info_type=${INFO_TYPE} --data-urlencode limit=1 --data-urlencode skip=0 --data-urlencode fields=asn,status,info_type"
if [ -n "${PDB_API_KEY:-}" ]; then
    pdb_code="$(curl -fsS -o /dev/null -w "%{http_code}" -H "Accept: application/json" -H "Authorization: Api-Key ${PDB_API_KEY}" ${pdb_args} "${pdb_url}" || true)"
else
    pdb_code="$(curl -fsS -o /dev/null -w "%{http_code}" -H "Accept: application/json" ${pdb_args} "${pdb_url}" || true)"
fi

# 429 (rate-limited) still proves the API is reachable, so it passes.
if [ "${pdb_code}" != "200" ] && [ "${pdb_code}" != "429" ]; then
    echo "[health] PeeringDB not accessible (status ${pdb_code})" >&2
    exit 1
fi

exit 0
|
||||
107
update.py
Normal file
107
update.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import os, time, json, tarfile, tempfile, shutil
import requests

# Runtime configuration, entirely driven by environment variables.
OUT_DIR = os.getenv("OUT_DIR", "/data")  # shared volume written here, read by the header service
LICENSE_KEY = os.getenv("MAXMIND_LICENSE_KEY", "").strip()  # required for the GeoLite2 download
PDB_API_KEY = os.getenv("PDB_API_KEY", "").strip()  # optional; raises PeeringDB rate limits
PDB_BASE = os.getenv("PDB_BASE", "https://www.peeringdb.com")
INFO_TYPE = os.getenv("PDB_INFO_TYPE", "Educational/Research")  # PeeringDB net info_type filter
TIMEOUT = int(os.getenv("HTTP_TIMEOUT", "30"))  # seconds, applied to every HTTP request
LIMIT = int(os.getenv("PDB_LIMIT", "250"))  # PeeringDB page size
|
||||
|
||||
def atomic_replace(src_path: str, dst_path: str) -> None:
    """Install *src_path* at *dst_path* without readers seeing a partial file.

    The content is first copied to a staging file next to the destination,
    then moved into place with os.replace(), which is atomic on POSIX.
    """
    target_dir = os.path.dirname(dst_path)
    os.makedirs(target_dir, exist_ok=True)
    staging = f"{dst_path}.tmp"
    shutil.copyfile(src_path, staging)
    os.replace(staging, dst_path)
|
||||
|
||||
def download_maxmind_mmdb() -> None:
    """Download the GeoLite2-ASN database and atomically install it in OUT_DIR.

    Raises:
        RuntimeError: if MAXMIND_LICENSE_KEY is unset or the archive does not
            contain a ``GeoLite2-ASN.mmdb`` member.
        requests.HTTPError: on a non-2xx download response.
    """
    if not LICENSE_KEY:
        raise RuntimeError("MAXMIND_LICENSE_KEY missing")

    # Official GeoLite2 download mechanism via license_key + edition_id.
    url = (
        "https://download.maxmind.com/app/geoip_download"
        f"?edition_id=GeoLite2-ASN&license_key={LICENSE_KEY}&suffix=tar.gz"
    )

    with tempfile.TemporaryDirectory() as td:
        tgz = os.path.join(td, "GeoLite2-ASN.tar.gz")
        # Stream the archive to disk so it is never held fully in memory.
        with requests.get(url, timeout=TIMEOUT, stream=True) as r:
            r.raise_for_status()
            with open(tgz, "wb") as f:
                for chunk in r.iter_content(chunk_size=1 << 20):
                    f.write(chunk)

        # Read the member via extractfile() instead of extract(): extract()
        # trusts the archive-supplied member name, so a malicious path
        # (e.g. "../../...") could escape the temp dir. Copying the stream
        # to a path we choose makes traversal impossible.
        mmdb_path = os.path.join(td, "GeoLite2-ASN.mmdb")
        extracted = False
        with tarfile.open(tgz, "r:gz") as tar:
            for member in tar.getmembers():
                if member.isfile() and member.name.endswith("GeoLite2-ASN.mmdb"):
                    src = tar.extractfile(member)
                    if src is None:
                        continue
                    with src, open(mmdb_path, "wb") as out:
                        shutil.copyfileobj(src, out)
                    extracted = True
                    break

        if not extracted:
            raise RuntimeError("GeoLite2-ASN.mmdb not found in archive")

        atomic_replace(mmdb_path, os.path.join(OUT_DIR, "GeoLite2-ASN.mmdb"))
|
||||
|
||||
def pdb_headers():
    """Build the HTTP headers for PeeringDB requests.

    Includes an Authorization header only when an API key is configured
    (the key is optional but raises rate limits).
    """
    headers = {"Accept": "application/json"}
    if PDB_API_KEY:
        headers["Authorization"] = f"Api-Key {PDB_API_KEY}"
    return headers
|
||||
|
||||
def fetch_pdb_page(skip: int):
    """Fetch one page of PeeringDB networks matching INFO_TYPE.

    Returns the (possibly empty) list under the response's "data" key;
    raises requests.HTTPError on a non-2xx response.
    """
    query = {
        "info_type": INFO_TYPE,
        "limit": LIMIT,
        "skip": skip,
        # Trim the payload to just the fields the updater inspects.
        "fields": "asn,status,info_type",
    }
    resp = requests.get(
        f"{PDB_BASE}/api/net",
        params=query,
        headers=pdb_headers(),
        timeout=TIMEOUT,
    )
    resp.raise_for_status()
    payload = resp.json()
    return payload.get("data", [])
|
||||
|
||||
def update_nren_asns() -> None:
    """Collect all matching ASNs from PeeringDB and write them to
    OUT_DIR/nren_asns.txt (one ASN per line, sorted) via an atomic replace.
    """
    asns = set()
    skip = 0
    while True:
        data = fetch_pdb_page(skip)
        for obj in data:
            # Only active ("ok") networks with a plausible ASN count.
            if obj.get("status") != "ok":
                continue
            asn = obj.get("asn")
            if isinstance(asn, int) and asn > 0:
                asns.add(asn)
        # A short page means we've reached the end of the result set.
        if len(data) < LIMIT:
            break
        skip += LIMIT
        time.sleep(1.1)  # very conservative pacing for PeeringDB rate limits

    out_txt = os.path.join(OUT_DIR, "nren_asns.txt")
    # Create the temp file inside OUT_DIR: os.replace() requires source and
    # destination on the same filesystem, and the default temp dir (/tmp) is
    # typically a different filesystem from the mounted /data volume, which
    # would make the rename fail with OSError (EXDEV).
    with tempfile.NamedTemporaryFile("w", dir=OUT_DIR, delete=False) as f:
        tmp_path = f.name
        for a in sorted(asns):
            f.write(f"{a}\n")
    os.replace(tmp_path, out_txt)
|
||||
|
||||
def write_meta():
    """Record when and with which settings the data was last refreshed."""
    snapshot = {
        "updated_at_unix": int(time.time()),
        "info_type": INFO_TYPE,
        "pdb_base": PDB_BASE,
    }
    meta_path = os.path.join(OUT_DIR, "metadata.json")
    with open(meta_path, "w") as fh:
        json.dump(snapshot, fh, indent=2)
|
||||
|
||||
def main():
    """Run one complete refresh: GeoLite2 DB, NREN ASN list, then metadata."""
    os.makedirs(OUT_DIR, exist_ok=True)
    download_maxmind_mmdb()
    update_nren_asns()
    write_meta()
    print("[ok] updated mmdb + nren_asns")


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user