Compare commits
13 Commits
main
...
4d945aabab
| Author | SHA1 | Date | |
|---|---|---|---|
| 4d945aabab | |||
| ade401d9e6 | |||
| adf290e4ac | |||
| 0072307bec | |||
| d36a1e7655 | |||
| e3ed622ade | |||
| 1e7013269e | |||
| 45fd4454fa | |||
| 5870ab952f | |||
| 54615ec19a | |||
| e255600a93 | |||
| 8d012b8085 | |||
| b00aeb5580 |
111
Jenkinsfile
vendored
Normal file
111
Jenkinsfile
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
pipeline {
    agent any

    environment {
        GIT_URL = 'https://gitea.mindboost.team/mindboost/education-flagger.git'
        GIT_BRANCH = 'pipeline/deploy-image'
        REGISTRY_SCHEME = 'https'
        REGISTRY_AUTHORITY = 'gitea.mindboost.team'
        IMAGE_NAME = 'mindboost/education-flagger'
        // Jenkins credentials-store ID used for the registry login below.
        REGISTRY_CREDENTIALS_ID = 'REGISTRY_CREDENTIALS_ID'
    }

    stages {
        stage('Checkout') {
            steps {
                script {
                    // NOTE(review): this Git credentialsId is hard-coded while the
                    // registry credential comes from the environment block —
                    // consider declaring it there too for consistency.
                    checkout([
                        $class: 'GitSCM',
                        branches: [[name: "*/${env.GIT_BRANCH}"]],
                        userRemoteConfigs: [[
                            url: env.GIT_URL,
                            credentialsId: 'b5f383be-8c74-40f9-b7e1-3a9c5856df0e'
                        ]]
                    ])
                }
            }
        }

        stage('Check Repository') {
            steps {
                script {
                    sh 'pwd'
                    sh 'ls -la'
                    sh 'git status'
                }
            }
        }

        stage('Determine Version') {
            steps {
                script {
                    // Tag images with the full commit hash so every build is
                    // traceable back to its exact source revision.
                    def fullHash = sh(
                        script: 'git rev-parse HEAD',
                        returnStdout: true
                    ).trim()
                    env.IMAGE_TAG = "sha256-${fullHash}"
                    echo "Resolved image tag: ${env.IMAGE_TAG}"
                }
            }
        }

        stage('Check Docker Image with the same tag') {
            steps {
                script {
                    def imageExists = sh(
                        script: "docker images -q ${env.IMAGE_NAME}:${env.IMAGE_TAG} || true",
                        returnStdout: true
                    ).trim()

                    if (imageExists) {
                        echo "Docker Image mit Tag ${env.IMAGE_TAG} existiert bereits. Überspringe Build."
                        // Setting the result makes the `when` guards of the
                        // Build/Push stages evaluate to false, skipping them.
                        currentBuild.result = 'SUCCESS'
                        return
                    } else {
                        echo "Kein vorhandenes Docker Image gefunden. Baue neues Image..."
                    }
                }
            }
        }

        stage('Build Docker Image') {
            when {
                expression { currentBuild.result == null }
            }
            steps {
                script {
                    sh "docker build --rm -t ${env.IMAGE_NAME}:${env.IMAGE_TAG} ."
                }
            }
        }

        stage('Push Docker Image') {
            when {
                expression { currentBuild.result == null }
            }
            steps {
                script {
                    withCredentials([usernamePassword(
                        credentialsId: env.REGISTRY_CREDENTIALS_ID,
                        usernameVariable: 'REGISTRY_USER',
                        passwordVariable: 'REGISTRY_PASS'
                    )]) {
                        // Single-quoted sh steps: the secret is expanded by the
                        // shell from the environment, never interpolated into
                        // the Groovy command string (Groovy interpolation would
                        // leak it into the build log / process table and break
                        // on quote characters in the password).
                        sh 'printf %s "$REGISTRY_PASS" | docker login "$REGISTRY_AUTHORITY" -u "$REGISTRY_USER" --password-stdin'
                        sh 'docker tag "$IMAGE_NAME:$IMAGE_TAG" "$REGISTRY_AUTHORITY/$IMAGE_NAME:$IMAGE_TAG"'
                        sh 'docker push "$REGISTRY_AUTHORITY/$IMAGE_NAME:$IMAGE_TAG"'
                        sh 'docker logout "$REGISTRY_AUTHORITY"'
                    }
                }
            }
        }

        stage('Cleanup Docker Images') {
            steps {
                script {
                    sh 'set -eux; docker image prune -f; docker builder prune -f'
                }
            }
        }
    }
}
|
||||
18
LICENSE
Normal file
18
LICENSE
Normal file
@@ -0,0 +1,18 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2026 mindboost
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
|
||||
associated documentation files (the "Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
|
||||
following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial
|
||||
portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
|
||||
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
|
||||
EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
22
README.md
22
README.md
@@ -60,6 +60,28 @@ Client
|
||||
|
||||
---
|
||||
|
||||
## Domain-Lookup (optional)
|
||||
|
||||
Für die Validierung von Institutions-Domains kann ein Lookup genutzt werden:
|
||||
|
||||
```
|
||||
GET /lookup?domain=uni-stuttgart.de
|
||||
```
|
||||
|
||||
Antwort (JSON):
|
||||
```json
|
||||
{
|
||||
"domain": "uni-stuttgart.de",
|
||||
"nren": true,
|
||||
"asn": 12345,
|
||||
"asn_org": "Universitaet Stuttgart",
|
||||
"ips": ["129.69.1.1"],
|
||||
"matched_ip": "129.69.1.1"
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Integration
|
||||
|
||||
Der Service wird als **Traefik ForwardAuth Middleware** eingebunden.
|
||||
|
||||
@@ -60,6 +60,28 @@ Client
|
||||
|
||||
---
|
||||
|
||||
## Domain-Lookup (optional)
|
||||
|
||||
Für Backend-Validierung von Institutions-Domains:
|
||||
|
||||
```
|
||||
GET /lookup?domain=uni-stuttgart.de
|
||||
```
|
||||
|
||||
Antwort (JSON):
|
||||
```json
|
||||
{
|
||||
"domain": "uni-stuttgart.de",
|
||||
"nren": true,
|
||||
"asn": 12345,
|
||||
"asn_org": "Universitaet Stuttgart",
|
||||
"ips": ["129.69.1.1"],
|
||||
"matched_ip": "129.69.1.1"
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Integration
|
||||
|
||||
Der Service wird als **Traefik ForwardAuth Middleware** eingebunden.
|
||||
|
||||
13
entrypoint.sh
Normal file
13
entrypoint.sh
Normal file
@@ -0,0 +1,13 @@
|
||||
#!/bin/sh
# Entrypoint for the updater container: run update.py immediately, then
# repeat forever, pausing INTERVAL_SECONDS (default 2592000s = 30 days)
# between runs.
set -eu

INTERVAL_SECONDS="${INTERVAL_SECONDS:-2592000}"
echo "[start] updater interval=${INTERVAL_SECONDS}s out_dir=${OUT_DIR:-/data}"

# `set -e` aborts the loop (and the container) if update.py fails, so the
# orchestrator's restart policy decides what happens next.
while :; do
  echo "[run] update now"
  python /app/update.py
  echo "[sleep] ${INTERVAL_SECONDS}s"
  sleep "${INTERVAL_SECONDS}"
done
|
||||
|
||||
19
example.env
Normal file
19
example.env
Normal file
@@ -0,0 +1,19 @@
|
||||
# Required
|
||||
MAXMIND_LICENSE_KEY=
|
||||
|
||||
# Optional (helps with rate limits)
|
||||
PDB_API_KEY=
|
||||
|
||||
# Output data location shared with the detection service
|
||||
OUT_DIR=/data
|
||||
|
||||
# PeeringDB settings
|
||||
PDB_BASE=https://www.peeringdb.com
|
||||
PDB_INFO_TYPE=Educational/Research
|
||||
PDB_LIMIT=250
|
||||
|
||||
# HTTP settings
|
||||
HTTP_TIMEOUT=30
|
||||
|
||||
# Update interval (seconds, default 30 days)
|
||||
INTERVAL_SECONDS=2592000
|
||||
49
healthcheck.sh
Normal file
49
healthcheck.sh
Normal file
@@ -0,0 +1,49 @@
|
||||
#!/bin/sh
# Container healthcheck: verify required configuration, the presence of the
# generated data files, and reachability of both upstream services
# (MaxMind downloads, PeeringDB API). Exits 0 when healthy, 1 otherwise.
set -eu

OUT_DIR="${OUT_DIR:-/data}"
PDB_BASE="${PDB_BASE:-https://www.peeringdb.com}"
INFO_TYPE="${PDB_INFO_TYPE:-Educational/Research}"

# The license key is mandatory — it is needed for the MaxMind probe below.
if [ -z "${MAXMIND_LICENSE_KEY:-}" ]; then
  echo "[health] MAXMIND_LICENSE_KEY missing" >&2
  exit 1
fi

if [ ! -d "${OUT_DIR}" ]; then
  echo "[health] OUT_DIR missing: ${OUT_DIR}" >&2
  exit 1
fi

# -s: the file must exist AND be non-empty.
if [ ! -s "${OUT_DIR}/GeoLite2-ASN.mmdb" ]; then
  echo "[health] GeoLite2-ASN.mmdb missing in ${OUT_DIR}" >&2
  exit 1
fi

if [ ! -s "${OUT_DIR}/nren_asns.txt" ]; then
  echo "[health] nren_asns.txt missing in ${OUT_DIR}" >&2
  exit 1
fi

# Probe the MaxMind download endpoint. `|| true` keeps `set -e` from
# aborting on a curl failure so we can report the HTTP status ourselves.
mm_url="https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-ASN&license_key=${MAXMIND_LICENSE_KEY}&suffix=tar.gz"
mm_code="$(curl -fsS -o /dev/null -w "%{http_code}" "${mm_url}" || true)"
if [ "${mm_code}" != "200" ]; then
  echo "[health] MaxMind download not accessible (status ${mm_code})" >&2
  exit 1
fi

# Probe PeeringDB with a minimal single-record query, sending the API key
# when one is configured. ${pdb_args} is intentionally unquoted so the
# shell splits it into separate curl arguments.
pdb_code="000"
pdb_url="${PDB_BASE}/api/net"
pdb_args="--get --data-urlencode info_type=${INFO_TYPE} --data-urlencode limit=1 --data-urlencode skip=0 --data-urlencode fields=asn,status,info_type"
if [ -n "${PDB_API_KEY:-}" ]; then
  pdb_code="$(curl -fsS -o /dev/null -w "%{http_code}" -H "Accept: application/json" -H "Authorization: Api-Key ${PDB_API_KEY}" ${pdb_args} "${pdb_url}" || true)"
else
  pdb_code="$(curl -fsS -o /dev/null -w "%{http_code}" -H "Accept: application/json" ${pdb_args} "${pdb_url}" || true)"
fi

# 429 (rate limited) still proves the endpoint is reachable, so it passes.
if [ "${pdb_code}" != "200" ] && [ "${pdb_code}" != "429" ]; then
  echo "[health] PeeringDB not accessible (status ${pdb_code})" >&2
  exit 1
fi

exit 0
|
||||
89
main.go
89
main.go
@@ -2,6 +2,7 @@ package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
@@ -19,6 +20,16 @@ type asnRecord struct {
|
||||
Org string `maxminddb:"autonomous_system_organization"`
|
||||
}
|
||||
|
||||
type lookupResponse struct {
|
||||
Domain string `json:"domain"`
|
||||
NREN bool `json:"nren"`
|
||||
ASN *uint `json:"asn,omitempty"`
|
||||
ASNOrg string `json:"asn_org,omitempty"`
|
||||
IPs []string `json:"ips"`
|
||||
MatchedIP string `json:"matched_ip,omitempty"`
|
||||
Error string `json:"error,omitempty"`
|
||||
}
|
||||
|
||||
type server struct {
|
||||
db *maxminddb.Reader
|
||||
nrenASNs map[uint]struct{}
|
||||
@@ -76,6 +87,12 @@ func remoteIP(r *http.Request) string {
|
||||
return r.RemoteAddr
|
||||
}
|
||||
|
||||
func writeJSON(w http.ResponseWriter, status int, payload any) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
_ = json.NewEncoder(w).Encode(payload)
|
||||
}
|
||||
|
||||
func (s *server) authHandler(w http.ResponseWriter, r *http.Request) {
|
||||
if !s.ready.Load() {
|
||||
w.WriteHeader(http.StatusServiceUnavailable)
|
||||
@@ -116,6 +133,77 @@ func (s *server) authHandler(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
|
||||
// lookupHandler serves GET /lookup?domain=<name>: it resolves the domain,
// looks up each resolved IP's ASN in the MaxMind database, and reports
// whether any of those ASNs is on the NREN list. Resolution failures are
// reported with HTTP 200 and an "error" field, not an error status.
func (s *server) lookupHandler(w http.ResponseWriter, r *http.Request) {
	// Refuse lookups until the data files have been loaded.
	if !s.ready.Load() {
		writeJSON(w, http.StatusServiceUnavailable, lookupResponse{
			NREN:  false,
			Error: "service not ready",
		})
		return
	}

	domain := strings.TrimSpace(r.URL.Query().Get("domain"))
	if domain == "" {
		writeJSON(w, http.StatusBadRequest, lookupResponse{
			NREN:  false,
			Error: "missing domain",
		})
		return
	}

	ips, err := net.LookupIP(domain)
	if err != nil || len(ips) == 0 {
		// DNS failure is an expected outcome for the caller, hence 200.
		writeJSON(w, http.StatusOK, lookupResponse{
			Domain: domain,
			NREN:   false,
			Error:  "domain lookup failed",
		})
		return
	}

	resp := lookupResponse{
		Domain: domain,
		NREN:   false,
		IPs:    make([]string, 0, len(ips)),
	}

	// First ASN/org seen across the IPs; reported when no NREN match is found.
	var firstASN *uint
	var firstOrg string

	for _, ip := range ips {
		ipStr := ip.String()
		resp.IPs = append(resp.IPs, ipStr)

		var rec asnRecord
		// Skip IPs the database cannot attribute to an ASN (error or ASN 0).
		if err := s.db.Lookup(ip, &rec); err != nil || rec.ASN == 0 {
			continue
		}

		if firstASN == nil {
			firstASN = new(uint)
			*firstASN = rec.ASN
			firstOrg = rec.Org
		}

		// First NREN hit wins: answer immediately with the matching IP.
		// NOTE: resp.IPs then only contains the IPs inspected so far.
		if _, ok := s.nrenASNs[rec.ASN]; ok {
			asn := rec.ASN
			resp.NREN = true
			resp.ASN = &asn
			resp.ASNOrg = rec.Org
			resp.MatchedIP = ipStr
			writeJSON(w, http.StatusOK, resp)
			return
		}
	}

	if firstASN != nil {
		resp.ASN = firstASN
		resp.ASNOrg = firstOrg
	}

	writeJSON(w, http.StatusOK, resp)
}
|
||||
|
||||
func main() {
|
||||
mmdbPath := getenv("MMDB_PATH", "/data/GeoLite2-ASN.mmdb")
|
||||
asnListPath := getenv("ASN_LIST_PATH", "/data/nren_asns.txt")
|
||||
@@ -146,6 +234,7 @@ func main() {
|
||||
|
||||
mux := http.NewServeMux()
|
||||
mux.HandleFunc("/auth", s.authHandler)
|
||||
mux.HandleFunc("/lookup", s.lookupHandler)
|
||||
mux.HandleFunc("/healthz", func(w http.ResponseWriter, _ *http.Request) {
|
||||
if s.asnCount < s.minASN {
|
||||
w.WriteHeader(http.StatusServiceUnavailable)
|
||||
|
||||
46
main_test.go
Normal file
46
main_test.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestLookupMissingDomain verifies that /lookup without a ?domain= query
// parameter is rejected with 400 and an explanatory error message.
func TestLookupMissingDomain(t *testing.T) {
	// A server with no database is enough: the handler validates the query
	// parameter before touching s.db.
	s := &server{
		nrenASNs: make(map[uint]struct{}),
	}
	s.ready.Store(true)

	req := httptest.NewRequest(http.MethodGet, "/lookup", nil)
	rr := httptest.NewRecorder()

	s.lookupHandler(rr, req)

	if rr.Code != http.StatusBadRequest {
		t.Fatalf("expected 400, got %d", rr.Code)
	}

	if !strings.Contains(rr.Body.String(), "missing domain") {
		t.Fatalf("expected error message in response")
	}
}
|
||||
|
||||
// TestLookupServiceNotReady verifies that /lookup answers 503 while the
// server has not finished loading its data files.
func TestLookupServiceNotReady(t *testing.T) {
	s := &server{
		nrenASNs: make(map[uint]struct{}),
	}
	// NOTE(review): assigning atomic.Bool{} copies an atomic value (flagged
	// by `go vet` copylocks); the zero value is already false, so the
	// Store(false) below would suffice on its own.
	s.ready = atomic.Bool{}
	s.ready.Store(false)

	req := httptest.NewRequest(http.MethodGet, "/lookup?domain=example.com", nil)
	rr := httptest.NewRecorder()

	s.lookupHandler(rr, req)

	if rr.Code != http.StatusServiceUnavailable {
		t.Fatalf("expected 503, got %d", rr.Code)
	}
}
|
||||
107
update.py
Normal file
107
update.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import os, time, json, tarfile, tempfile, shutil
|
||||
import requests
|
||||
|
||||
OUT_DIR = os.getenv("OUT_DIR", "/data")
|
||||
LICENSE_KEY = os.getenv("MAXMIND_LICENSE_KEY", "").strip()
|
||||
PDB_API_KEY = os.getenv("PDB_API_KEY", "").strip()
|
||||
PDB_BASE = os.getenv("PDB_BASE", "https://www.peeringdb.com")
|
||||
INFO_TYPE = os.getenv("PDB_INFO_TYPE", "Educational/Research")
|
||||
TIMEOUT = int(os.getenv("HTTP_TIMEOUT", "30"))
|
||||
LIMIT = int(os.getenv("PDB_LIMIT", "250"))
|
||||
|
||||
def atomic_replace(src_path: str, dst_path: str) -> None:
    """Install *src_path* at *dst_path* atomically (copy to a sibling temp
    file, then rename over the destination), creating parent dirs if needed."""
    dst_dir = os.path.dirname(dst_path)
    # os.makedirs("") raises FileNotFoundError, so only create directories
    # when dst_path actually has a directory component.
    if dst_dir:
        os.makedirs(dst_dir, exist_ok=True)
    tmp = dst_path + ".tmp"
    shutil.copyfile(src_path, tmp)
    # os.replace is atomic because tmp lives on the same filesystem as dst.
    os.replace(tmp, dst_path)
|
||||
|
||||
def download_maxmind_mmdb() -> None:
    """Download the GeoLite2-ASN database from MaxMind and install it
    atomically at OUT_DIR/GeoLite2-ASN.mmdb.

    Raises:
        RuntimeError: if MAXMIND_LICENSE_KEY is unset or the downloaded
            archive does not contain GeoLite2-ASN.mmdb.
    """
    if not LICENSE_KEY:
        raise RuntimeError("MAXMIND_LICENSE_KEY missing")

    # Official GeoLite2 download mechanism via license_key + edition_id.
    url = (
        "https://download.maxmind.com/app/geoip_download"
        f"?edition_id=GeoLite2-ASN&license_key={LICENSE_KEY}&suffix=tar.gz"
    )

    with tempfile.TemporaryDirectory() as td:
        tgz = os.path.join(td, "GeoLite2-ASN.tar.gz")
        # Stream the archive to disk in chunks instead of buffering the
        # whole response body in memory via r.content.
        with requests.get(url, timeout=TIMEOUT, stream=True) as r:
            r.raise_for_status()
            with open(tgz, "wb") as f:
                for chunk in r.iter_content(chunk_size=1 << 20):
                    f.write(chunk)

        mmdb_found = None
        real_td = os.path.realpath(td)
        with tarfile.open(tgz, "r:gz") as tar:
            for member in tar.getmembers():
                if not member.name.endswith("GeoLite2-ASN.mmdb"):
                    continue
                # Guard against path traversal ("../", absolute paths)
                # before extracting from an externally produced archive.
                dest = os.path.realpath(os.path.join(td, member.name))
                if not dest.startswith(real_td + os.sep):
                    continue
                tar.extract(member, path=td)
                mmdb_found = os.path.join(td, member.name)
                break

        if not mmdb_found or not os.path.exists(mmdb_found):
            raise RuntimeError("GeoLite2-ASN.mmdb not found in archive")

        atomic_replace(mmdb_found, os.path.join(OUT_DIR, "GeoLite2-ASN.mmdb"))
|
||||
|
||||
def pdb_headers():
    """Build the HTTP headers for PeeringDB requests, attaching the
    optional API key when one is configured."""
    headers = {"Accept": "application/json"}
    if PDB_API_KEY:
        # PeeringDB API key (optional).
        headers["Authorization"] = f"Api-Key {PDB_API_KEY}"
    return headers
|
||||
|
||||
def fetch_pdb_page(skip: int):
    """Fetch one page of PeeringDB /api/net records starting at *skip*.

    Returns the (possibly empty) list under the response's "data" key.
    """
    query = {
        "info_type": INFO_TYPE,
        "limit": LIMIT,
        "skip": skip,
        "fields": "asn,status,info_type",
    }
    resp = requests.get(
        f"{PDB_BASE}/api/net",
        params=query,
        headers=pdb_headers(),
        timeout=TIMEOUT,
    )
    resp.raise_for_status()
    return resp.json().get("data", [])
||||
|
||||
def update_nren_asns() -> None:
    """Collect all Educational/Research network ASNs from PeeringDB and
    write them (one per line, sorted) atomically to OUT_DIR/nren_asns.txt."""
    asns = set()
    skip = 0
    while True:
        data = fetch_pdb_page(skip)
        for obj in data:
            # Only records PeeringDB marks as "ok" count.
            if obj.get("status") != "ok":
                continue
            asn = obj.get("asn")
            if isinstance(asn, int) and asn > 0:
                asns.add(asn)
        # A short page means we have reached the last one.
        if len(data) < LIMIT:
            break
        skip += LIMIT
        time.sleep(1.1)  # very conservative pacing to respect rate limits

    out_txt = os.path.join(OUT_DIR, "nren_asns.txt")
    # Create the temp file inside OUT_DIR: NamedTemporaryFile defaults to
    # the system temp dir (often /tmp), and os.replace() fails with EXDEV
    # when source and destination are on different filesystems (e.g. /tmp
    # vs a mounted data volume).
    with tempfile.NamedTemporaryFile("w", delete=False, dir=OUT_DIR) as f:
        for a in sorted(asns):
            f.write(f"{a}\n")
        tmp_path = f.name
    os.replace(tmp_path, out_txt)
|
||||
|
||||
def write_meta():
    """Record when and from which PeeringDB endpoint the data files were
    last refreshed, as OUT_DIR/metadata.json."""
    path = os.path.join(OUT_DIR, "metadata.json")
    payload = {
        "updated_at_unix": int(time.time()),
        "info_type": INFO_TYPE,
        "pdb_base": PDB_BASE,
    }
    with open(path, "w") as fh:
        json.dump(payload, fh, indent=2)
|
||||
|
||||
def main():
    """Run one full refresh: download the MaxMind ASN DB, rebuild the NREN
    ASN list from PeeringDB, then stamp metadata.json."""
    os.makedirs(OUT_DIR, exist_ok=True)
    download_maxmind_mmdb()
    update_nren_asns()
    write_meta()
    print("[ok] updated mmdb + nren_asns")


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user