Initial commit: Skyfeed base
commit 3290f02b5b (parent 2f16d70b05)
cmd/skyfeed/main.go (new file, 108 lines)
@@ -0,0 +1,108 @@
package main

import (
    "fmt"
    "os"
    "strings"

    "github.com/spf13/cobra"

    "github.com/leaktechnologies/skyfeed/internal/config"
    "github.com/leaktechnologies/skyfeed/internal/geo"
    "github.com/leaktechnologies/skyfeed/internal/output"
    "github.com/leaktechnologies/skyfeed/internal/weather"
)

var rootCmd = &cobra.Command{
    Use:   "skyfeed",
    Short: "Skyfeed - Open Weather Engine for Telefact and Terminal",
    Long: `Skyfeed fetches and normalizes weather data from Environment Canada,
using a local IP database for accurate geolocation. It supports both CLI and API modes.`,
    Run: func(cmd *cobra.Command, args []string) {
        cmd.Help()
    },
}

func main() {
    // Initialize configuration and ensure data directories exist
    config.Init()

    // Register subcommands
    rootCmd.AddCommand(fetchCmd)
    rootCmd.AddCommand(showCmd)
    rootCmd.AddCommand(updateCmd)

    if err := rootCmd.Execute(); err != nil {
        fmt.Println("Error:", err)
        os.Exit(1)
    }
}

// ------------------------------
// Subcommands
// ------------------------------

var fetchCmd = &cobra.Command{
    Use:   "fetch",
    Short: "Fetch the latest weather data for your current location",
    Run: func(cmd *cobra.Command, args []string) {
        output.LogInfo("Skyfeed: Checking IP database...")
        if err := geo.EnsureIPDBUpToDate(); err != nil {
            output.LogError(fmt.Sprintf("Failed to update IP DB: %v", err))
            return
        }

        output.LogInfo("Skyfeed: Detecting location...")
        city, lat, lon, err := geo.GetUserLocation()
        if err != nil {
            output.LogError(fmt.Sprintf("Could not determine location: %v", err))
            return
        }
        output.LogInfo(fmt.Sprintf("Detected: %s (%.4f, %.4f)", city, lat, lon))

        output.LogInfo("Finding nearest Environment Canada station...")
        station, err := geo.FindNearestStation(lat, lon)
        if err != nil {
            output.LogError(fmt.Sprintf("Station lookup failed: %v", err))
            return
        }
        output.LogInfo(fmt.Sprintf("Nearest station: %s [%s]", station.Name, station.Code))

        output.LogInfo("Fetching latest weather data...")
        // Determine province from the station (if available)
        province := strings.Split(station.Code, "_")[0] // fallback heuristic
        data, err := weather.FetchCurrent(station.Code, province)
        if err != nil {
            output.LogError(fmt.Sprintf("Weather fetch failed: %v", err))
            return
        }

        fmt.Println(output.FormatWeatherCLI(data, true))
    },
}

var showCmd = &cobra.Command{
    Use:   "show",
    Short: "Show cached weather data from disk",
    Run: func(cmd *cobra.Command, args []string) {
        data, err := weather.LoadCached()
        if err != nil {
            output.LogError(fmt.Sprintf("Failed to load cache: %v", err))
            return
        }
        fmt.Println(output.FormatWeatherCLI(data, true))
    },
}

var updateCmd = &cobra.Command{
    Use:   "update-ipdb",
    Short: "Manually update the local IP geolocation database",
    Run: func(cmd *cobra.Command, args []string) {
        output.LogInfo("Forcing IP database update...")
        if err := geo.ForceUpdateIPDB(); err != nil {
            output.LogError(fmt.Sprintf("Update failed: %v", err))
            return
        }
        output.LogSuccess("IP database updated successfully.")
    },
}

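For reference, the subcommands registered above map to these invocations (binary name assumed to be skyfeed once built):

    skyfeed fetch        # fetch current weather for the detected location
    skyfeed show         # show cached weather data from disk
    skyfeed update-ipdb  # manually update the local IP geolocation database
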
config/default.json (new empty file)
docs/API_REFERENCE.md (new empty file)
docs/ARCHITECTURE.md (new empty file)
docs/INTEGRATION_TELEFACT.md (new empty file)
docs/ROADMAP.md (new empty file)
docs/WEATHER_CODES.md (new empty file)

go.mod (new file, 15 lines)
@@ -0,0 +1,15 @@
module github.com/leaktechnologies/skyfeed

go 1.25.4

require (
    github.com/oschwald/geoip2-golang v1.13.0
    github.com/spf13/cobra v1.10.1
)

require (
    github.com/inconshreveable/mousetrap v1.1.0 // indirect
    github.com/oschwald/maxminddb-golang v1.13.0 // indirect
    github.com/spf13/pflag v1.0.9 // indirect
    golang.org/x/sys v0.20.0 // indirect
)

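Assuming a standard Go toolchain matching the version declared above, the CLI builds from the module root with:

    go build ./cmd/skyfeed
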
go.sum (new file, 23 lines)
@@ -0,0 +1,23 @@
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/oschwald/geoip2-golang v1.13.0 h1:Q44/Ldc703pasJeP5V9+aFSZFmBN7DKHbNsSFzQATJI=
github.com/oschwald/geoip2-golang v1.13.0/go.mod h1:P9zG+54KPEFOliZ29i7SeYZ/GM6tfEL+rgSn03hYuUo=
github.com/oschwald/maxminddb-golang v1.13.0 h1:R8xBorY71s84yO06NgTmQvqvTvlS/bnYZrrWX1MElnU=
github.com/oschwald/maxminddb-golang v1.13.0/go.mod h1:BU0z8BfFVhi1LQaonTwwGQlsHUEu9pWNdMfmq4ztm0o=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

internal/api/server.go (new file, 1 line)
@@ -0,0 +1 @@
package api

internal/config/config.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package config

import (
    "fmt"
    "os"
    "path/filepath"
)

// Global app paths (used by all other packages)
var (
    ConfigDir string
    CacheDir  string
    DataDir   string
)

// Init creates the required directories and prints their paths if missing.
func Init() {
    home, err := os.UserHomeDir()
    if err != nil {
        fmt.Println("Error: cannot resolve home directory:", err)
        os.Exit(1)
    }

    ConfigDir = filepath.Join(home, ".config", "skyfeed")
    CacheDir = filepath.Join(home, ".cache", "skyfeed")
    DataDir = filepath.Join(home, ".local", "share", "skyfeed")

    dirs := []string{ConfigDir, CacheDir, DataDir}
    for _, dir := range dirs {
        if _, err := os.Stat(dir); os.IsNotExist(err) {
            err := os.MkdirAll(dir, 0755)
            if err != nil {
                fmt.Printf("Error creating %s: %v\n", dir, err)
                os.Exit(1)
            }
        }
    }
}

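On a typical Linux home directory, Init() resolves to the following paths; the other packages in this commit store the MaxMind key under ConfigDir and the databases/caches under DataDir:

    ~/.config/skyfeed        ConfigDir  (maxmind.key)
    ~/.cache/skyfeed         CacheDir
    ~/.local/share/skyfeed   DataDir    (GeoLite2-City.mmdb, stations.json, towns.json, last_weather.json)
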
internal/config/defaults.go (new file, 1 line)
@@ -0,0 +1 @@
package config

internal/geo/distance.go (new file, 1 line)
@@ -0,0 +1 @@
package geo

internal/geo/geolocate.go (new file, 113 lines)
@@ -0,0 +1,113 @@
package geo

import (
    "encoding/json"
    "fmt"
    "net"
    "net/http"
    "path/filepath"
    "strings"
    "time"

    "github.com/leaktechnologies/skyfeed/internal/config"
    "github.com/oschwald/geoip2-golang"
)

// GetUserLocation resolves the user's public IP into a city and coordinates.
// It will try multiple fallback IP providers if the first one fails.
func GetUserLocation() (string, float64, float64, error) {
    fmt.Println("[geo] Detecting location...")

    ip, err := fetchPublicIP()
    if err != nil {
        return "", 0, 0, fmt.Errorf("failed to resolve public IP: %w", err)
    }

    dbPath := filepath.Join(config.DataDir, "GeoLite2-City.mmdb")
    db, err := geoip2.Open(dbPath)
    if err != nil {
        return "", 0, 0, fmt.Errorf("failed to open GeoLite2 database: %w", err)
    }
    defer db.Close()

    record, err := db.City(ip)
    if err != nil {
        return "", 0, 0, fmt.Errorf("geoip lookup failed: %w", err)
    }

    city := record.City.Names["en"]
    prov := ""
    if len(record.Subdivisions) > 0 {
        prov = record.Subdivisions[0].Names["en"]
    }

    lat := record.Location.Latitude
    lon := record.Location.Longitude

    if city == "" && prov == "" {
        return "", 0, 0, fmt.Errorf("no location info found for IP %s", ip.String())
    }

    fmt.Printf("[geo] Detected: %s, %s (%.4f, %.4f)\n", city, prov, lat, lon)
    return fmt.Sprintf("%s, %s", city, prov), lat, lon, nil
}

// fetchPublicIP tries multiple reliable endpoints for the public IPv4 address.
func fetchPublicIP() (net.IP, error) {
    providers := []string{
        "https://ipv4.icanhazip.com",
        "https://api.ipify.org?format=json",
        "https://ifconfig.co/json",
    }

    client := &http.Client{Timeout: 5 * time.Second}

    for _, url := range providers {
        ip, err := tryProvider(url, client)
        if err == nil && ip != nil {
            return ip, nil
        }
        fmt.Println("[geo] Fallback:", err)
    }

    return nil, fmt.Errorf("all IP detection methods failed")
}

// tryProvider queries a single IP API endpoint and parses IPv4 results.
func tryProvider(url string, client *http.Client) (net.IP, error) {
    resp, err := client.Get(url)
    if err != nil {
        return nil, fmt.Errorf("network error (%s): %w", url, err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("HTTP %d (%s)", resp.StatusCode, url)
    }

    // Some APIs return plain text, others JSON
    var result struct {
        IP string `json:"ip"`
    }

    // Try decode JSON
    if err := json.NewDecoder(resp.Body).Decode(&result); err == nil && result.IP != "" {
        ip := net.ParseIP(result.IP)
        if ip != nil && ip.To4() != nil {
            return ip, nil
        }
    }

    // Fallback: plain text (trim whitespace, since endpoints like icanhazip append a newline)
    resp2, err := client.Get(url)
    if err == nil {
        defer resp2.Body.Close()
        buf := make([]byte, 64)
        n, _ := resp2.Body.Read(buf)
        ip := net.ParseIP(strings.TrimSpace(string(buf[:n])))
        if ip != nil && ip.To4() != nil {
            return ip, nil
        }
    }

    return nil, fmt.Errorf("no valid IP found from %s", url)
}

internal/geo/ipdb_updater.go (new file, 137 lines)
@@ -0,0 +1,137 @@
package geo

import (
    "archive/tar"
    "compress/gzip"
    "fmt"
    "io"
    "net/http"
    "os"
    "path/filepath"
    "strings"
    "time"

    "github.com/leaktechnologies/skyfeed/internal/config"
)

const (
    ipdbFileName = "GeoLite2-City.mmdb"
    keyFileName  = "maxmind.key"
)

// EnsureIPDBUpToDate checks the local MaxMind database and refreshes monthly.
func EnsureIPDBUpToDate() error {
    dbPath := filepath.Join(config.DataDir, ipdbFileName)
    info, err := os.Stat(dbPath)

    if os.IsNotExist(err) {
        fmt.Println("[geo] No IP database found, downloading...")
        return updateIPDB(dbPath)
    }
    if err != nil {
        return fmt.Errorf("unable to check IP DB: %w", err)
    }

    modTime := info.ModTime().UTC()
    now := time.Now().UTC()
    firstOfMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, time.UTC)

    if modTime.Before(firstOfMonth) {
        fmt.Println("[geo] IP database is older than this month, refreshing...")
        return updateIPDB(dbPath)
    }

    fmt.Println("[geo] IP database is current.")
    return nil
}

// ForceUpdateIPDB forces an immediate refresh.
func ForceUpdateIPDB() error {
    dbPath := filepath.Join(config.DataDir, ipdbFileName)
    fmt.Println("[geo] Forcing IP database update...")
    return updateIPDB(dbPath)
}

// updateIPDB downloads and extracts the official GeoLite2 City database using your MaxMind key.
func updateIPDB(dest string) error {
    keyPath := filepath.Join(config.ConfigDir, keyFileName)
    keyBytes, err := os.ReadFile(keyPath)
    if err != nil {
        return fmt.Errorf("[geo] Missing MaxMind license key.\nPlease run:\n echo \"YOUR_KEY\" > %s", keyPath)
    }

    key := strings.TrimSpace(string(keyBytes))
    url := fmt.Sprintf("https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City&license_key=%s&suffix=tar.gz", key)

    tmpTar := dest + ".tar.gz"
    if err := downloadFile(url, tmpTar); err != nil {
        return fmt.Errorf("failed to download GeoLite2 archive: %w", err)
    }
    defer os.Remove(tmpTar)

    if err := extractMMDB(tmpTar, dest); err != nil {
        return fmt.Errorf("failed to extract mmdb: %w", err)
    }

    fmt.Println("[geo] IP database updated successfully →", dest)
    return nil
}

// downloadFile streams a file from URL to disk.
func downloadFile(url, dest string) error {
    resp, err := http.Get(url)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return fmt.Errorf("unexpected HTTP status: %s", resp.Status)
    }

    out, err := os.Create(dest)
    if err != nil {
        return err
    }
    defer out.Close()

    _, err = io.Copy(out, resp.Body)
    return err
}

// extractMMDB extracts the .mmdb file from a tar.gz archive.
func extractMMDB(src, dest string) error {
    f, err := os.Open(src)
    if err != nil {
        return err
    }
    defer f.Close()

    gz, err := gzip.NewReader(f)
    if err != nil {
        return err
    }
    defer gz.Close()

    tr := tar.NewReader(gz)
    for {
        h, err := tr.Next()
        if err == io.EOF {
            break
        }
        if err != nil {
            return err
        }

        if filepath.Ext(h.Name) == ".mmdb" {
            out, err := os.Create(dest)
            if err != nil {
                return err
            }
            defer out.Close()
            _, err = io.Copy(out, tr)
            return err
        }
    }
    return fmt.Errorf("no .mmdb found in archive")
}

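As the error message in updateIPDB suggests, the license key is expected as a plain-text file in the config directory, e.g.:

    echo "YOUR_KEY" > ~/.config/skyfeed/maxmind.key
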
internal/geo/stations.go (new file, 183 lines)
@@ -0,0 +1,183 @@
package geo

import (
    "encoding/json"
    "encoding/xml"
    "errors"
    "fmt"
    "io"
    "math"
    "net/http"
    "os"
    "path/filepath"
    "time"

    "github.com/leaktechnologies/skyfeed/internal/config"
)

// Station represents an Environment Canada citypage station.
type Station struct {
    Name     string  `json:"name"`
    Code     string  `json:"code"`
    Province string  `json:"province"`
    Lat      float64 `json:"lat"`
    Lon      float64 `json:"lon"`
}

// stationCacheFile defines where we persist the station index locally.
const stationCacheFile = "stations.json"

// FindNearestStation locates the closest Environment Canada weather station to given coordinates.
func FindNearestStation(lat, lon float64) (Station, error) {
    if lat == 0 && lon == 0 {
        return Station{}, errors.New("invalid coordinates: cannot locate nearest station")
    }

    stations, err := ensureStationCache()
    if err != nil {
        return Station{}, fmt.Errorf("failed to load station list: %w", err)
    }

    var nearest Station
    minDist := math.MaxFloat64

    for _, s := range stations {
        d := Haversine(lat, lon, s.Lat, s.Lon)
        if d < minDist {
            minDist = d
            nearest = s
        }
    }

    if nearest.Code == "" {
        return Station{}, errors.New("no station found in index")
    }

    fmt.Printf("[geo] Nearest station: %s (%s, %.2f km)\n", nearest.Name, nearest.Province, minDist)
    return nearest, nil
}

// ensureStationCache loads the cached station list or updates it if missing/outdated.
func ensureStationCache() ([]Station, error) {
    cachePath := filepath.Join(config.DataDir, stationCacheFile)

    info, err := os.Stat(cachePath)
    if os.IsNotExist(err) {
        fmt.Println("[geo] No station cache found, fetching from Environment Canada...")
        return updateStationCache(cachePath)
    }
    if err != nil {
        return nil, err
    }

    // Refresh monthly
    modTime := info.ModTime().UTC()
    now := time.Now().UTC()
    firstOfMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, time.UTC)
    if modTime.Before(firstOfMonth) {
        fmt.Println("[geo] Station cache is older than this month, refreshing...")
        return updateStationCache(cachePath)
    }

    // Load existing JSON cache
    f, err := os.Open(cachePath)
    if err != nil {
        return nil, err
    }
    defer f.Close()

    var stations []Station
    if err := json.NewDecoder(f).Decode(&stations); err != nil {
        return nil, err
    }

    return stations, nil
}

// updateStationCache fetches and parses Environment Canada's current site list (XML).
func updateStationCache(dest string) ([]Station, error) {
    const ecURL = "https://geo.weather.gc.ca/geomet/features/collections/citypage_weather:siteList/items?f=xml"

    fmt.Println("[geo] Downloading station list from:", ecURL)

    client := &http.Client{Timeout: 25 * time.Second}
    resp, err := client.Get(ecURL)
    if err != nil {
        return nil, fmt.Errorf("failed to fetch EC station list: %w", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        body, _ := io.ReadAll(resp.Body)
        return nil, fmt.Errorf("unexpected HTTP status: %s — %s", resp.Status, string(body))
    }

    type Site struct {
        Code     string  `xml:"properties>code"`
        NameEn   string  `xml:"properties>nameEn"`
        NameFr   string  `xml:"properties>nameFr"`
        Province string  `xml:"properties>provinceCode"`
        Lat      float64 `xml:"geometry>coordinates>1"` // Note: GeoJSON order is [lon, lat]
        Lon      float64 `xml:"geometry>coordinates>0"`
    }

    var parsed struct {
        Sites []Site `xml:"member"`
    }

    if err := xml.NewDecoder(resp.Body).Decode(&parsed); err != nil {
        return nil, fmt.Errorf("failed to parse site list XML: %w", err)
    }

    stations := make([]Station, 0, len(parsed.Sites))
    for _, s := range parsed.Sites {
        if s.Code == "" || s.Lat == 0 || s.Lon == 0 {
            continue
        }
        name := s.NameEn
        if name == "" {
            name = s.NameFr
        }
        stations = append(stations, Station{
            Name:     name,
            Code:     s.Code,
            Province: s.Province,
            Lat:      s.Lat,
            Lon:      s.Lon,
        })
    }

    if err := os.MkdirAll(filepath.Dir(dest), 0755); err != nil {
        return nil, err
    }

    f, err := os.Create(dest)
    if err != nil {
        return nil, err
    }
    defer f.Close()

    enc := json.NewEncoder(f)
    enc.SetIndent("", " ")
    if err := enc.Encode(stations); err != nil {
        return nil, err
    }

    fmt.Printf("[geo] Saved %d Environment Canada stations → %s\n", len(stations), dest)
    return stations, nil
}

// Haversine computes the great-circle distance (in km) between two coordinates.
func Haversine(lat1, lon1, lat2, lon2 float64) float64 {
    const R = 6371
    dLat := toRadians(lat2 - lat1)
    dLon := toRadians(lon2 - lon1)
    lat1R := toRadians(lat1)
    lat2R := toRadians(lat2)
    a := math.Sin(dLat/2)*math.Sin(dLat/2) +
        math.Cos(lat1R)*math.Cos(lat2R)*math.Sin(dLon/2)*math.Sin(dLon/2)
    c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
    return R * c
}

func toRadians(deg float64) float64 { return deg * math.Pi / 180 }

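A minimal sanity check for Haversine, sketched as a test that could sit alongside this file in internal/geo; the Toronto and Ottawa coordinates are approximate, and the expected great-circle distance is roughly 350 km:

    package geo

    import "testing"

    func TestHaversineTorontoOttawa(t *testing.T) {
        // Approximate city-centre coordinates; expect a value in the ~350 km range.
        d := Haversine(43.6532, -79.3832, 45.4215, -75.6972)
        if d < 300 || d > 400 {
            t.Fatalf("unexpected Toronto-Ottawa distance: %.1f km", d)
        }
    }
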
internal/geo/towns_build.go (new file, 1 line)
@@ -0,0 +1 @@
package geo

internal/geo/towns_lookup.go (new file, 56 lines)
@@ -0,0 +1,56 @@
package geo

import (
    "encoding/json"
    "fmt"
    "math"
    "os"
    "path/filepath"

    "github.com/leaktechnologies/skyfeed/internal/config"
)

// Town represents a Canadian town record (loaded from towns.json).
type Town struct {
    Name     string  `json:"name"`
    Province string  `json:"province"`
    Lat      float64 `json:"lat"`
    Lon      float64 `json:"lon"`
}

// FindNearestTown loads the cached towns.json and finds the closest town to given coordinates.
func FindNearestTown(lat, lon float64) (Town, error) {
    townsPath := filepath.Join(config.DataDir, "towns.json")

    data, err := os.ReadFile(townsPath)
    if err != nil {
        return Town{}, fmt.Errorf("failed to read town index: %w", err)
    }

    var towns []Town
    if err := json.Unmarshal(data, &towns); err != nil {
        return Town{}, fmt.Errorf("failed to parse towns.json: %w", err)
    }

    if len(towns) == 0 {
        return Town{}, fmt.Errorf("no towns found in index")
    }

    minDist := math.MaxFloat64
    var nearest Town

    for _, t := range towns {
        d := Haversine(lat, lon, t.Lat, t.Lon) // ✅ use shared helper from stations.go
        if d < minDist {
            minDist = d
            nearest = t
        }
    }

    if nearest.Name == "" {
        return Town{}, fmt.Errorf("no nearby town found")
    }

    fmt.Printf("[geo] Nearest town: %s, %s (%.2f km)\n", nearest.Name, nearest.Province, minDist)
    return nearest, nil
}

internal/geo/towns_updater.go (new file, 135 lines)
@@ -0,0 +1,135 @@
package geo

import (
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "os"
    "path/filepath"
    "time"

    "github.com/leaktechnologies/skyfeed/internal/config"
)

const (
    // Official Geographical Names Board of Canada WFS API
    // Docs: https://www.nrcan.gc.ca/earth-sciences/geography/geographical-names-board-canada/download-geographical-names-data/10786
    gnbcAPIURL = "https://geogratis.gc.ca/geonames/servlet/com.gc.ccra.geonames.webservices.GeographicalNamesService?service=WFS&request=GetFeature&version=2.0.0&typeNames=geonames:geoname_eng&outputFormat=json&featureCode=PPL"

    townsFile    = "towns.json"
    maxFetchTime = 5 * time.Minute
)

// TownRecord represents a single Canadian town.
type TownRecord struct {
    Name     string  `json:"name"`
    Province string  `json:"province"`
    Lat      float64 `json:"lat"`
    Lon      float64 `json:"lon"`
}

// EnsureTownIndexUpToDate checks if the towns index needs updating (monthly).
func EnsureTownIndexUpToDate() error {
    dest := filepath.Join(config.DataDir, townsFile)

    info, err := os.Stat(dest)
    if os.IsNotExist(err) {
        fmt.Println("[geo] No town index found, downloading...")
        return downloadTownIndex(dest)
    }

    if err != nil {
        return fmt.Errorf("unable to check town index: %w", err)
    }

    modTime := info.ModTime().UTC()
    now := time.Now().UTC()
    firstOfMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, time.UTC)

    if modTime.Before(firstOfMonth) {
        fmt.Println("[geo] Town index is older than this month, refreshing...")
        return downloadTownIndex(dest)
    }

    fmt.Println("[geo] Town index is current.")
    return nil
}

// ForceUpdateTownIndex forces an immediate rebuild.
func ForceUpdateTownIndex() error {
    dest := filepath.Join(config.DataDir, townsFile)
    fmt.Println("[geo] Forcing town index update...")
    return downloadTownIndex(dest)
}

// downloadTownIndex fetches and stores the Canadian town dataset.
func downloadTownIndex(dest string) error {
    client := &http.Client{Timeout: maxFetchTime}

    fmt.Println("[geo] Fetching town data from GNBC WFS API...")
    resp, err := client.Get(gnbcAPIURL)
    if err != nil {
        return fmt.Errorf("failed to fetch town dataset: %w", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return fmt.Errorf("unexpected HTTP status: %s", resp.Status)
    }

    raw, err := io.ReadAll(resp.Body)
    if err != nil {
        return fmt.Errorf("failed to read GNBC response: %w", err)
    }

    towns, err := parseGNBCJSON(raw)
    if err != nil {
        return fmt.Errorf("failed to parse GNBC JSON: %w", err)
    }

    data, err := json.MarshalIndent(towns, "", " ")
    if err != nil {
        return fmt.Errorf("failed to encode towns: %w", err)
    }

    if err := os.WriteFile(dest, data, 0644); err != nil {
        return fmt.Errorf("failed to write %s: %w", dest, err)
    }

    fmt.Printf("[geo] Town index updated → %s (%d towns)\n", dest, len(towns))
    return nil
}

// parseGNBCJSON extracts relevant town info from the GNBC GeoJSON.
func parseGNBCJSON(data []byte) ([]TownRecord, error) {
    var response struct {
        Features []struct {
            Properties struct {
                Name      string  `json:"name"`
                Province  string  `json:"province"`
                Latitude  float64 `json:"latitude"`
                Longitude float64 `json:"longitude"`
            } `json:"properties"`
        } `json:"features"`
    }

    if err := json.Unmarshal(data, &response); err != nil {
        return nil, fmt.Errorf("invalid GNBC JSON: %w", err)
    }

    var towns []TownRecord
    for _, f := range response.Features {
        p := f.Properties
        if p.Name == "" || p.Province == "" {
            continue
        }
        towns = append(towns, TownRecord{
            Name:     p.Name,
            Province: p.Province,
            Lat:      p.Latitude,
            Lon:      p.Longitude,
        })
    }

    return towns, nil
}

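Given the TownRecord JSON tags, the generated towns.json is a flat array of records. An illustrative entry (the values are made up for the example, and the exact province formatting depends on what the GNBC feed returns):

    [
      { "name": "Banff", "province": "AB", "lat": 51.1784, "lon": -115.5708 }
    ]
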
internal/output/formatter.go (new file, 101 lines)
@@ -0,0 +1,101 @@
package output

import (
    "fmt"
    "strings"
    "time"

    "github.com/leaktechnologies/skyfeed/internal/weather"
)

// Theme defines simple ANSI colour codes for terminal output.
var Theme = struct {
    Reset   string
    White   string
    Cyan    string
    Yellow  string
    Blue    string
    Green   string
    Red     string
    Magenta string
}{
    Reset:   "\033[0m",
    White:   "\033[97m",
    Cyan:    "\033[96m",
    Yellow:  "\033[93m",
    Blue:    "\033[94m",
    Green:   "\033[92m",
    Red:     "\033[91m",
    Magenta: "\033[95m",
}

// WeatherIcon maps normalized condition keywords to simple glyphs.
func WeatherIcon(condition string) string {
    condition = strings.ToLower(condition)
    switch {
    case strings.Contains(condition, "sunny"):
        return "☀"
    case strings.Contains(condition, "clear"):
        return "🌙"
    case strings.Contains(condition, "partly"):
        return "⛅"
    case strings.Contains(condition, "cloud"):
        return "☁"
    case strings.Contains(condition, "rain"):
        return "🌧"
    case strings.Contains(condition, "snow"):
        return "❄"
    case strings.Contains(condition, "mixed"):
        return "🌨"
    case strings.Contains(condition, "fog"):
        return "🌫"
    case strings.Contains(condition, "thunder"):
        return "⛈"
    default:
        return "❔"
    }
}

// FormatWeatherCLI returns a full formatted string for terminal output.
func FormatWeatherCLI(data weather.WeatherData, colored bool) string {
    icon := WeatherIcon(data.Condition)
    temp := fmt.Sprintf("%.1f°C", data.Temperature)
    ts := parseTimestamp(data.Timestamp)

    if colored {
        return fmt.Sprintf("%s%s %s%s %s%s%s\n%sCondition:%s %s\n%sHumidity:%s %s%% %sWind:%s %s %s\n%sPressure:%s %s kPa %sUpdated:%s %s%s",
            Theme.Cyan, icon, Theme.White, data.Station,
            Theme.Yellow, temp, Theme.Reset,
            Theme.Blue, Theme.Reset, data.Condition,
            Theme.Blue, Theme.Reset, data.Humidity,
            Theme.Blue, Theme.Reset, data.WindDir, data.WindSpeed,
            Theme.Blue, Theme.Reset, data.Pressure,
            Theme.Blue, Theme.Reset, ts.Format("15:04 MST"), Theme.Reset,
        )
    }

    return fmt.Sprintf("%s %s %s\nCondition: %s\nHumidity: %s%% Wind: %s %s\nPressure: %s kPa Updated: %s",
        icon, data.Station, temp,
        data.Condition,
        data.Humidity, data.WindDir, data.WindSpeed,
        data.Pressure, ts.Format("15:04 MST"),
    )
}

// parseTimestamp safely parses an RFC3339 or other timestamp format.
func parseTimestamp(ts string) time.Time {
    if ts == "" {
        return time.Now()
    }
    t, err := time.Parse(time.RFC3339, ts)
    if err != nil {
        return time.Now()
    }
    return t
}

// FormatForUI produces a compact version for GUI or embedded display.
func FormatForUI(data weather.WeatherData) string {
    icon := WeatherIcon(data.Condition)
    return fmt.Sprintf("%s %s %.1f°C — %s", icon, data.Station, data.Temperature, data.Condition)
}

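A quick sketch of FormatForUI in use, assuming it is called from somewhere inside the module (the internal packages are not importable from outside it); with these sample values the condition maps to the ❄ glyph:

    package main

    import (
        "fmt"

        "github.com/leaktechnologies/skyfeed/internal/output"
        "github.com/leaktechnologies/skyfeed/internal/weather"
    )

    func main() {
        line := output.FormatForUI(weather.WeatherData{
            Station:     "Toronto",
            Temperature: -2.5,
            Condition:   "Light Snow",
        })
        fmt.Println(line) // ❄ Toronto -2.5°C — Light Snow
    }
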
internal/output/logger.go (new file, 16 lines)
@@ -0,0 +1,16 @@
package output

import "fmt"

// Simple color-coded log helpers
func LogInfo(msg string) {
    fmt.Printf("\033[36m[INFO]\033[0m %s\n", msg) // Cyan
}

func LogSuccess(msg string) {
    fmt.Printf("\033[32m[SUCCESS]\033[0m %s\n", msg) // Green
}

func LogError(msg string) {
    fmt.Printf("\033[31m[ERROR]\033[0m %s\n", msg) // Red
}

internal/scheduler/updater.go (new file, 1 line)
@@ -0,0 +1 @@
package scheduler

internal/ui/gui.go (new file, 1 line)
@@ -0,0 +1 @@
package ui

internal/ui/tui.go (new file, 1 line)
@@ -0,0 +1 @@
package ui

internal/weather/cache.go (new file, 84 lines)
@@ -0,0 +1,84 @@
package weather

import (
    "encoding/json"
    "fmt"
    "os"
    "path/filepath"
    "time"

    "github.com/leaktechnologies/skyfeed/internal/config"
)

// cacheFilePath returns the cache file path. It is computed at call time so that
// config.Init() has already populated config.DataDir; a package-level variable
// would be evaluated before Init runs and silently fall back to a relative path.
func cacheFilePath() string {
    return filepath.Join(config.DataDir, "last_weather.json")
}

// CacheTTL defines how long cached weather data is considered "fresh".
// Default: 30 minutes.
const CacheTTL = 30 * time.Minute

// LoadFromCache loads cached weather data if available and still fresh.
// Returns (WeatherData, isFresh, error)
func LoadFromCache() (WeatherData, bool, error) {
    file, err := os.Open(cacheFilePath())
    if err != nil {
        return WeatherData{}, false, fmt.Errorf("no cache found")
    }
    defer file.Close()

    var data WeatherData
    if err := json.NewDecoder(file).Decode(&data); err != nil {
        return WeatherData{}, false, fmt.Errorf("failed to decode cache: %w", err)
    }

    ts, err := time.Parse(time.RFC3339, data.Timestamp)
    if err != nil {
        // handle legacy or non-RFC timestamps
        return data, false, nil
    }

    age := time.Since(ts)
    if age > CacheTTL {
        fmt.Printf("[weather] Cache is stale (%.0f min old)\n", age.Minutes())
        return data, false, nil
    }

    fmt.Println("[weather] Loaded fresh cache (", int(age.Minutes()), "min old )")
    return data, true, nil
}

// SaveToCache writes a WeatherData object to disk.
func SaveToCache(data WeatherData) error {
    if err := os.MkdirAll(filepath.Dir(cacheFilePath()), 0755); err != nil {
        return fmt.Errorf("failed to create cache dir: %w", err)
    }

    file, err := os.Create(cacheFilePath())
    if err != nil {
        return fmt.Errorf("failed to write cache: %w", err)
    }
    defer file.Close()

    data.Timestamp = time.Now().UTC().Format(time.RFC3339)

    enc := json.NewEncoder(file)
    enc.SetIndent("", " ")
    if err := enc.Encode(data); err != nil {
        return fmt.Errorf("failed to encode cache: %w", err)
    }

    fmt.Println("[weather] Cache saved →", cacheFilePath())
    return nil
}

// ClearCache removes the current cached file if present.
func ClearCache() error {
    if _, err := os.Stat(cacheFilePath()); os.IsNotExist(err) {
        return nil
    }
    if err := os.Remove(cacheFilePath()); err != nil {
        return fmt.Errorf("failed to clear cache: %w", err)
    }
    fmt.Println("[weather] Cache cleared.")
    return nil
}

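A small round-trip sketch (again assuming it runs inside the module, after config.Init() has created the data directory): SaveToCache stamps the timestamp, and an immediate LoadFromCache should report the entry as fresh since it is well within CacheTTL.

    package main

    import (
        "fmt"

        "github.com/leaktechnologies/skyfeed/internal/config"
        "github.com/leaktechnologies/skyfeed/internal/weather"
    )

    func main() {
        config.Init()

        // Sample values for illustration only.
        sample := weather.WeatherData{Station: "Halifax", Temperature: 4.2, Condition: "Rain"}
        if err := weather.SaveToCache(sample); err != nil {
            fmt.Println("save failed:", err)
            return
        }

        data, fresh, err := weather.LoadFromCache()
        fmt.Println(data.Station, fresh, err) // Halifax true <nil>
    }
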
internal/weather/fetch.go (new file, 139 lines)
@@ -0,0 +1,139 @@
package weather

import (
    "encoding/json"
    "encoding/xml"
    "fmt"
    "io"
    "net/http"
    "os"
    "path/filepath"
    "strconv"
    "strings" // ✅ Required for province normalization
    "time"

    "github.com/leaktechnologies/skyfeed/internal/config"
)

// WeatherData holds simplified normalized current weather data.
type WeatherData struct {
    Station     string  `json:"station"`
    Temperature float64 `json:"temperature"`
    Condition   string  `json:"condition"`
    Humidity    string  `json:"humidity"`
    Pressure    string  `json:"pressure"`
    WindSpeed   string  `json:"wind_speed"`
    WindDir     string  `json:"wind_dir"`
    Timestamp   string  `json:"timestamp"`
}

// Supported province folders in Environment Canada citypage_weather XML structure.
var provinceCodes = []string{
    "AB", "BC", "MB", "NB", "NL", "NS", "NT", "NU",
    "ON", "PE", "QC", "SK", "YT",
}

// FetchCurrent retrieves current weather from Environment Canada for any province.
func FetchCurrent(stationCode, province string) (WeatherData, error) {
    if stationCode == "" {
        return WeatherData{}, fmt.Errorf("no station code provided")
    }

    // If province unknown, we’ll probe each possible province directory until one succeeds.
    targetProvinces := provinceCodes
    if province != "" {
        targetProvinces = []string{strings.ToUpper(province)}
    }

    var lastErr error
    for _, prov := range targetProvinces {
        url := fmt.Sprintf("https://dd.weather.gc.ca/citypage_weather/xml/%s/%s_e.xml", prov, stationCode)
        fmt.Printf("[weather] Fetching current weather for %s in %s...\n", stationCode, prov)

        client := &http.Client{Timeout: 15 * time.Second}
        resp, err := client.Get(url)
        if err != nil {
            lastErr = err
            continue
        }
        defer resp.Body.Close()

        if resp.StatusCode != http.StatusOK {
            lastErr = fmt.Errorf("HTTP %s", resp.Status)
            continue
        }

        body, err := io.ReadAll(resp.Body)
        if err != nil {
            return WeatherData{}, fmt.Errorf("failed to read EC XML: %w", err)
        }

        var parsed struct {
            XMLName           xml.Name `xml:"siteData"`
            Location          string   `xml:"location>name"`
            CurrentConditions struct {
                Temperature      string `xml:"temperature"`
                Condition        string `xml:"condition"`
                RelativeHumidity string `xml:"relativeHumidity"`
                Pressure         string `xml:"pressure"`
                Wind             struct {
                    Speed     string `xml:"speed"`
                    Direction string `xml:"direction"`
                } `xml:"wind"`
            } `xml:"currentConditions"`
        }

        if err := xml.Unmarshal(body, &parsed); err != nil {
            lastErr = fmt.Errorf("failed to parse EC XML: %w", err)
            continue
        }

        if parsed.CurrentConditions.Temperature == "" && parsed.CurrentConditions.Condition == "" {
            lastErr = fmt.Errorf("no data for %s in %s", stationCode, prov)
            continue
        }

        temp, _ := strconv.ParseFloat(parsed.CurrentConditions.Temperature, 64)

        data := WeatherData{
            Station:     parsed.Location,
            Temperature: temp,
            Condition:   NormalizeCondition(parsed.CurrentConditions.Condition),
            Humidity:    parsed.CurrentConditions.RelativeHumidity,
            Pressure:    parsed.CurrentConditions.Pressure,
            WindSpeed:   parsed.CurrentConditions.Wind.Speed,
            WindDir:     parsed.CurrentConditions.Wind.Direction,
            Timestamp:   time.Now().UTC().Format(time.RFC3339),
        }

        if err := SaveToCache(data); err != nil {
            fmt.Println("[weather] Warning: failed to save cache:", err)
        }

        fmt.Printf("[SUCCESS] Current: %.1f°C, %s (%s)\n", data.Temperature, data.Condition, prov)
        return data, nil
    }

    if lastErr != nil {
        return WeatherData{}, fmt.Errorf("no valid feed found for %s: %v", stationCode, lastErr)
    }
    return WeatherData{}, fmt.Errorf("failed to fetch weather for %s", stationCode)
}

// LoadCached loads the last weather data from disk for offline fallback.
func LoadCached() (WeatherData, error) {
    cachePath := filepath.Join(config.DataDir, "last_weather.json")
    file, err := os.Open(cachePath)
    if err != nil {
        return WeatherData{}, fmt.Errorf("no cached weather found")
    }
    defer file.Close()

    var data WeatherData
    if err := json.NewDecoder(file).Decode(&data); err != nil {
        return WeatherData{}, fmt.Errorf("failed to decode cache: %w", err)
    }

    fmt.Println("[weather] Loaded cached weather →", data.Timestamp)
    return data, nil
}

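The request URL assembled inside the loop follows Environment Canada's citypage_weather layout: for example, province "ON" and a placeholder station code "s0000000" yield https://dd.weather.gc.ca/citypage_weather/xml/ON/s0000000_e.xml. Real station codes come from the index built in internal/geo/stations.go.
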
internal/weather/normalize.go (new file, 67 lines)
@@ -0,0 +1,67 @@
package weather

import (
    "regexp"
    "strings"
)

// NormalizeCondition converts Environment Canada’s verbose condition text
// into a consistent, title-cased string suitable for display or icon mapping.
func NormalizeCondition(raw string) string {
    raw = strings.TrimSpace(strings.ToLower(raw))
    if raw == "" {
        return "Unknown"
    }

    // Common regex cleanup patterns
    replacements := map[*regexp.Regexp]string{
        regexp.MustCompile(`(?i)snowshower|snow showers?`):         "Snow",
        regexp.MustCompile(`(?i)rainshower|rain showers?`):         "Rain",
        regexp.MustCompile(`(?i)freezing rain|ice pellets|sleet`):  "Freezing Rain",
        regexp.MustCompile(`(?i)flurries?`):                        "Light Snow",
        regexp.MustCompile(`(?i)thunderstorms?|tstorms?`):          "Thunderstorm",
        regexp.MustCompile(`(?i)drizzle`):                          "Drizzle",
        regexp.MustCompile(`(?i)fog patches?|mist|haze|smoke|ash`): "Fog",
        regexp.MustCompile(`(?i)blowing snow|drifting snow`):       "Blowing Snow",
        regexp.MustCompile(`(?i)freezing drizzle`):                 "Freezing Drizzle",
        regexp.MustCompile(`(?i)showers? mixed with flurries?`):    "Mixed Rain/Snow",
        regexp.MustCompile(`(?i)snow mixed with rain`):             "Mixed Rain/Snow",
        regexp.MustCompile(`(?i)mainly cloudy|mostly cloudy`):      "Cloudy",
        regexp.MustCompile(`(?i)mainly sunny|mostly sunny|sunny`):  "Sunny",
        regexp.MustCompile(`(?i)partly cloudy|a few clouds`):       "Partly Cloudy",
        regexp.MustCompile(`(?i)clear|fair`):                       "Clear",
        regexp.MustCompile(`(?i)rain and snow|rain/snow`):          "Mixed Rain/Snow",
        regexp.MustCompile(`(?i)light rain`):                       "Light Rain",
        regexp.MustCompile(`(?i)heavy rain`):                       "Heavy Rain",
        regexp.MustCompile(`(?i)light snow`):                       "Light Snow",
        regexp.MustCompile(`(?i)heavy snow`):                       "Heavy Snow",
    }

    for pattern, replacement := range replacements {
        if pattern.MatchString(raw) {
            raw = pattern.ReplaceAllString(raw, replacement)
        }
    }

    // Collapse multiple spaces and ensure proper capitalization
    raw = strings.Join(strings.Fields(raw), " ")
    raw = strings.Title(raw)

    // Final normalization
    switch raw {
    case "Cloudy Periods":
        raw = "Cloudy"
    case "Mostly Cloudy":
        raw = "Cloudy"
    case "Mostly Sunny":
        raw = "Sunny"
    }

    return raw
}

// NormalizeWeatherData standardizes the Condition field in WeatherData.
func NormalizeWeatherData(data WeatherData) WeatherData {
    data.Condition = NormalizeCondition(data.Condition)
    return data
}

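A few expected mappings, written as a test sketch for internal/weather; these particular inputs each match only one pattern, so the map's random iteration order does not affect them:

    package weather

    import "testing"

    func TestNormalizeCondition(t *testing.T) {
        cases := map[string]string{
            "Mainly Sunny": "Sunny",
            "mist":         "Fog",
            "light rain":   "Light Rain",
            "":             "Unknown",
        }
        for in, want := range cases {
            if got := NormalizeCondition(in); got != want {
                t.Errorf("NormalizeCondition(%q) = %q, want %q", in, got, want)
            }
        }
    }
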
scripts/update_geoip.sh (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#!/usr/bin/env bash
set -euo pipefail

: "${MAXMIND_LICENSE:?Set MAXMIND_LICENSE env var before running}"

DATA_DIR="${HOME}/.local/share/skyfeed"
TMP_DIR="$(mktemp -d)"
OUT_TAR="$TMP_DIR/geolite.tar.gz"
DEST_MMDB="$DATA_DIR/GeoLite2-City.mmdb"

mkdir -p "$DATA_DIR"

# MaxMind download URL pattern (uses license_key and suffix)
URL="https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City&license_key=${MAXMIND_LICENSE}&suffix=tar.gz"

echo "[update_geoip] Downloading GeoLite2-City..."
curl -sSL -A "Skyfeed/1.0 (+https://leaktechnologies.dev)" -o "$OUT_TAR" "$URL"

echo "[update_geoip] Extracting .mmdb..."
# Tar contains folder like GeoLite2-City_YYYYMMDD/GeoLite2-City.mmdb
tar -xzf "$OUT_TAR" -C "$TMP_DIR"

MMDB_PATH=$(find "$TMP_DIR" -type f -name "GeoLite2-City.mmdb" | head -n1)
if [ -z "$MMDB_PATH" ]; then
  echo "[update_geoip] ERROR: .mmdb not found in archive" >&2
  exit 2
fi

# atomic replace
tmp_dest="${DEST_MMDB}.tmp"
cp "$MMDB_PATH" "$tmp_dest"
sync
mv -f "$tmp_dest" "$DEST_MMDB"

echo "[update_geoip] Installed $DEST_MMDB"
rm -rf "$TMP_DIR"

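Usage sketch for the script above (it refuses to run unless MAXMIND_LICENSE is set):

    MAXMIND_LICENSE="YOUR_KEY" ./scripts/update_geoip.sh
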