mirror of
https://github.com/therootcompany/golib.git
synced 2026-04-24 12:48:00 +00:00
feat: add net/geoip for MaxMind GeoLite2 database downloads
Downloader checks file mtime before fetching (MaxMind enforces a 30-downloads-per-day rate limit). Extracts the .mmdb atomically from the tar.gz, preserving MaxMind's release date as the mtime so freshness checks survive restarts. Strips the auth header on redirects (302 → Cloudflare R2 presigned URL, which must not receive credentials). Defaults: 3-day freshness threshold, 5-minute timeout. Also ignores GeoIP.conf and *.mmdb in .gitignore.
This commit is contained in:
parent
8c578ee0c6
commit
da33660c7c
2
.gitignore
vendored
2
.gitignore
vendored
@ -1,4 +1,6 @@
|
||||
credentials.tsv
|
||||
GeoIP.conf
|
||||
*.mmdb
|
||||
|
||||
.env
|
||||
*.env
|
||||
|
||||
94
net/geoip/cmd/geoip-update/main.go
Normal file
94
net/geoip/cmd/geoip-update/main.go
Normal file
@ -0,0 +1,94 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/therootcompany/golib/net/geoip"
|
||||
)
|
||||
|
||||
func main() {
|
||||
configPath := flag.String("config", "GeoIP.conf", "path to GeoIP.conf")
|
||||
dir := flag.String("dir", "", "directory to store .mmdb files (overrides DatabaseDirectory in config)")
|
||||
freshDays := flag.Int("fresh-days", 0, "skip download if file is younger than N days (default 3)")
|
||||
flag.Parse()
|
||||
|
||||
cfg, err := parseConf(*configPath)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
outDir := *dir
|
||||
if outDir == "" {
|
||||
outDir = cfg["DatabaseDirectory"]
|
||||
}
|
||||
if outDir == "" {
|
||||
outDir = "."
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(outDir, 0o755); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "error: mkdir %s: %v\n", outDir, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
accountID := cfg["AccountID"]
|
||||
licenseKey := cfg["LicenseKey"]
|
||||
if accountID == "" || licenseKey == "" {
|
||||
fmt.Fprintf(os.Stderr, "error: AccountID and LicenseKey are required in %s\n", *configPath)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
editions := strings.Fields(cfg["EditionIDs"])
|
||||
if len(editions) == 0 {
|
||||
fmt.Fprintf(os.Stderr, "error: no EditionIDs found in %s\n", *configPath)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
d := geoip.New(accountID, licenseKey)
|
||||
d.FreshDays = *freshDays
|
||||
|
||||
exitCode := 0
|
||||
for _, edition := range editions {
|
||||
path := filepath.Join(outDir, edition+".mmdb")
|
||||
updated, err := d.Fetch(edition, path)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "error: %s: %v\n", edition, err)
|
||||
exitCode = 1
|
||||
continue
|
||||
}
|
||||
if updated {
|
||||
info, _ := os.Stat(path)
|
||||
fmt.Printf("updated: %s -> %s (%s)\n", edition, path, info.ModTime().Format("2006-01-02"))
|
||||
} else {
|
||||
info, _ := os.Stat(path)
|
||||
fmt.Printf("fresh: %s (%s)\n", edition, info.ModTime().Format("2006-01-02"))
|
||||
}
|
||||
}
|
||||
os.Exit(exitCode)
|
||||
}
|
||||
|
||||
// parseConf reads a geoipupdate-style config file (key value pairs, # comments).
|
||||
func parseConf(path string) (map[string]string, error) {
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
cfg := make(map[string]string)
|
||||
scanner := bufio.NewScanner(f)
|
||||
for scanner.Scan() {
|
||||
line := strings.TrimSpace(scanner.Text())
|
||||
if line == "" || strings.HasPrefix(line, "#") {
|
||||
continue
|
||||
}
|
||||
key, value, _ := strings.Cut(line, " ")
|
||||
cfg[strings.TrimSpace(key)] = strings.TrimSpace(value)
|
||||
}
|
||||
return cfg, scanner.Err()
|
||||
}
|
||||
141
net/geoip/geoip.go
Normal file
141
net/geoip/geoip.go
Normal file
@ -0,0 +1,141 @@
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"compress/gzip"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
	// Edition IDs accepted by the MaxMind download API.
	CityEdition    = "GeoLite2-City"
	ASNEdition     = "GeoLite2-ASN"
	CountryEdition = "GeoLite2-Country"

	// downloadBase is the MaxMind download API prefix; the edition and
	// "/download?suffix=tar.gz" are appended per request.
	downloadBase = "https://download.maxmind.com/geoip/databases"
	// defaultFreshDays is the mtime age below which Fetch skips downloading.
	defaultFreshDays = 3
	// defaultTimeout bounds the whole HTTP request, including the redirect
	// and body download.
	defaultTimeout = 5 * time.Minute
)
|
||||
|
||||
// Downloader fetches MaxMind GeoLite2 .mmdb files from the download API.
// It checks file mtime before downloading to stay within the 30/day rate limit.
//
// MaxMind preserves the database release date as the mtime of the .mmdb entry
// inside the tar archive. After extraction, mtime reflects data age — not
// download time — so it is reliable for freshness checks across restarts.
//
// The zero value is not usable: AccountID and LicenseKey must be set (see New).
type Downloader struct {
	AccountID  string        // MaxMind account ID, sent as the Basic Auth username
	LicenseKey string        // MaxMind license key, sent as the Basic Auth password
	FreshDays  int           // skip download if file mtime is younger than this many days; 0 uses 3
	Timeout    time.Duration // HTTP client timeout for the whole fetch; 0 uses 5m
}
|
||||
|
||||
// New returns a Downloader configured with the given credentials.
|
||||
func New(accountID, licenseKey string) *Downloader {
|
||||
return &Downloader{AccountID: accountID, LicenseKey: licenseKey}
|
||||
}
|
||||
|
||||
// Fetch downloads the named edition to path if the file is stale (mtime older
|
||||
// than FreshDays). Returns whether the file was updated.
|
||||
func (d *Downloader) Fetch(edition, path string) (bool, error) {
|
||||
freshDays := d.FreshDays
|
||||
if freshDays == 0 {
|
||||
freshDays = defaultFreshDays
|
||||
}
|
||||
|
||||
if info, err := os.Stat(path); err == nil {
|
||||
if time.Since(info.ModTime()) < time.Duration(freshDays)*24*time.Hour {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
timeout := d.Timeout
|
||||
if timeout == 0 {
|
||||
timeout = defaultTimeout
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/%s/download?suffix=tar.gz", downloadBase, edition)
|
||||
|
||||
req, err := http.NewRequest(http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
req.SetBasicAuth(d.AccountID, d.LicenseKey)
|
||||
|
||||
// Strip auth on redirects: MaxMind issues a 302 to a Cloudflare R2 presigned
|
||||
// URL that must not receive our credentials.
|
||||
client := &http.Client{
|
||||
Timeout: timeout,
|
||||
CheckRedirect: func(req *http.Request, via []*http.Request) error {
|
||||
req.Header.Del("Authorization")
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return false, fmt.Errorf("unexpected status %d fetching %s", resp.StatusCode, url)
|
||||
}
|
||||
|
||||
if err := extractMMDB(resp.Body, path); err != nil {
|
||||
return false, fmt.Errorf("%s: %w", edition, err)
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// extractMMDB reads a MaxMind tar.gz archive, writes the .mmdb entry to path
|
||||
// atomically (via tmp+rename), and sets its mtime to MaxMind's release date.
|
||||
func extractMMDB(r io.Reader, path string) error {
|
||||
gr, err := gzip.NewReader(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer gr.Close()
|
||||
|
||||
tr := tar.NewReader(gr)
|
||||
for {
|
||||
hdr, err := tr.Next()
|
||||
if err == io.EOF {
|
||||
return fmt.Errorf("no .mmdb file found in archive")
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !strings.HasSuffix(hdr.Name, ".mmdb") {
|
||||
continue
|
||||
}
|
||||
|
||||
tmp := path + ".tmp"
|
||||
f, err := os.Create(tmp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := io.Copy(f, tr); err != nil {
|
||||
f.Close()
|
||||
os.Remove(tmp)
|
||||
return err
|
||||
}
|
||||
f.Close()
|
||||
|
||||
if err := os.Rename(tmp, path); err != nil {
|
||||
os.Remove(tmp)
|
||||
return err
|
||||
}
|
||||
|
||||
// Preserve MaxMind's release date so mtime == data age, not download time.
|
||||
if !hdr.ModTime.IsZero() {
|
||||
os.Chtimes(path, hdr.ModTime, hdr.ModTime)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
Loading…
x
Reference in New Issue
Block a user