diff --git a/.env.example b/.env.example index 074bee9..1127dc5 100644 --- a/.env.example +++ b/.env.example @@ -3,3 +3,8 @@ TPDB_API_KEY=your-api-key-here # Adult Empire API Key (if enabled) AE_API_KEY=your-api-key-here + +# StashDB / stash-box +STASHDB_API_KEY=your-api-key-here +# Optional custom endpoint (default stashdb.org GraphQL URL) +STASHDB_ENDPOINT=https://stashdb.org/graphql diff --git a/.gitignore b/.gitignore index a805100..2530041 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,7 @@ # Cache directories /cache/ /tmp/ +/config/api_keys.json # Node modules (Bootstrap) node_modules/ diff --git a/README.md b/README.md index ce96dad..669a487 100644 --- a/README.md +++ b/README.md @@ -165,6 +165,7 @@ go run ./cmd/goondex performer-search "test" ### Scripts - `source scripts/env.sh` - Pin Go caches inside the repo (recommended before building) +- `source scripts/load-env.sh` - Load API keys from `.env.local` (or `.env`) without hardcoding them - `scripts/build.sh` - Build the CLI (`bin/goondex`) - `ADDR=localhost:8788 scripts/run.sh` - Build (if needed) and start the web UI - `scripts/test.sh` - Run `go test ./cmd/... ./internal/...` diff --git a/cmd/goondex/main.go b/cmd/goondex/main.go index b428193..f9c54ac 100644 --- a/cmd/goondex/main.go +++ b/cmd/goondex/main.go @@ -9,6 +9,7 @@ import ( "time" "git.leaktechnologies.dev/stu/Goondex/internal/db" + "git.leaktechnologies.dev/stu/Goondex/internal/config" "git.leaktechnologies.dev/stu/Goondex/internal/model" "git.leaktechnologies.dev/stu/Goondex/internal/scraper/adultemp" "git.leaktechnologies.dev/stu/Goondex/internal/scraper/merger" @@ -19,7 +20,7 @@ import ( ) const tpdbAPIKeyEnvVar = "TPDB_API_KEY" -const tpdbEnabled = false +const tpdbEnabled = true var ( dbPath string @@ -440,7 +441,7 @@ func getTPDBAPIKey() (string, error) { return "", fmt.Errorf("TPDB integration is disabled. Use Adult Empire commands (e.g., 'goondex adultemp search-performer' and 'goondex adultemp scrape-performer ') to import data instead.") } - apiKey := os.Getenv(tpdbAPIKeyEnvVar) + apiKey := config.GetAPIKeys().TPDBAPIKey if apiKey == "" { return "", fmt.Errorf("%s environment variable is not set.\n%s", tpdbAPIKeyEnvVar, apiKeySetupInstructions()) } diff --git a/internal/config/keys.go b/internal/config/keys.go new file mode 100644 index 0000000..44051a6 --- /dev/null +++ b/internal/config/keys.go @@ -0,0 +1,96 @@ +package config + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "sync" +) + +// APIKeys holds external service credentials. +type APIKeys struct { + TPDBAPIKey string `json:"tpdb_api_key"` + AEAPIKey string `json:"ae_api_key"` + StashDBAPIKey string `json:"stashdb_api_key"` + StashDBEndpoint string `json:"stashdb_endpoint"` +} + +const apiKeysFile = "config/api_keys.json" + +var ( + keysMu sync.RWMutex + cached *APIKeys +) + +// GetAPIKeys returns the configured API keys, preferring the persisted file, falling back to environment variables. +func GetAPIKeys() APIKeys { + keysMu.RLock() + if cached != nil { + defer keysMu.RUnlock() + return *cached + } + keysMu.RUnlock() + + keys := APIKeys{} + + // Try file first. + if b, err := os.ReadFile(apiKeysFile); err == nil { + _ = json.Unmarshal(b, &keys) + } + + // Fallback to env if fields are empty. 
+ if keys.TPDBAPIKey == "" { + keys.TPDBAPIKey = os.Getenv("TPDB_API_KEY") + } + if keys.AEAPIKey == "" { + keys.AEAPIKey = os.Getenv("AE_API_KEY") + } + if keys.StashDBAPIKey == "" { + keys.StashDBAPIKey = os.Getenv("STASHDB_API_KEY") + } + if keys.StashDBEndpoint == "" { + keys.StashDBEndpoint = os.Getenv("STASHDB_ENDPOINT") + } + if keys.StashDBEndpoint == "" { + keys.StashDBEndpoint = "https://stashdb.org/graphql" + } + + keysMu.Lock() + cached = &keys + keysMu.Unlock() + + return keys +} + +// SaveAPIKeys persists API keys to disk (config/api_keys.json) and updates cache. +func SaveAPIKeys(keys APIKeys) error { + keysMu.Lock() + defer keysMu.Unlock() + + // Normalize whitespace. + keys.TPDBAPIKey = strings.TrimSpace(keys.TPDBAPIKey) + keys.AEAPIKey = strings.TrimSpace(keys.AEAPIKey) + keys.StashDBAPIKey = strings.TrimSpace(keys.StashDBAPIKey) + keys.StashDBEndpoint = strings.TrimSpace(keys.StashDBEndpoint) + if keys.StashDBEndpoint == "" { + keys.StashDBEndpoint = "https://stashdb.org/graphql" + } + + if err := os.MkdirAll(filepath.Dir(apiKeysFile), 0o755); err != nil { + return fmt.Errorf("create config dir: %w", err) + } + + data, err := json.MarshalIndent(keys, "", " ") + if err != nil { + return fmt.Errorf("marshal keys: %w", err) + } + + if err := os.WriteFile(apiKeysFile, data, 0o600); err != nil { + return fmt.Errorf("write keys file: %w", err) + } + + cached = &keys + return nil +} diff --git a/internal/web/server.go b/internal/web/server.go index 3ca805b..cc4ec19 100644 --- a/internal/web/server.go +++ b/internal/web/server.go @@ -7,14 +7,17 @@ import ( "fmt" "html/template" "io/fs" + "log" "net/http" - "os" "strconv" + "strings" "time" "git.leaktechnologies.dev/stu/Goondex/internal/db" import_service "git.leaktechnologies.dev/stu/Goondex/internal/import" "git.leaktechnologies.dev/stu/Goondex/internal/model" + "git.leaktechnologies.dev/stu/Goondex/internal/config" + "git.leaktechnologies.dev/stu/Goondex/internal/scraper/adultemp" "git.leaktechnologies.dev/stu/Goondex/internal/scraper/tpdb" "git.leaktechnologies.dev/stu/Goondex/internal/sync" ) @@ -78,6 +81,16 @@ func (s *Server) Start() error { mux.HandleFunc("/scenes/", s.handleSceneDetail) mux.HandleFunc("/movies", s.handleMovieList) mux.HandleFunc("/movies/", s.handleMovieDetail) + mux.HandleFunc("/settings", s.handleSettingsPage) + + // Adult Empire endpoints + mux.HandleFunc("/api/ae/import/performer", s.handleAEImportPerformer) + mux.HandleFunc("/api/ae/import/performer-by-url", s.handleAEImportPerformerByURL) + mux.HandleFunc("/api/ae/import/scene", s.handleAEImportScene) + mux.HandleFunc("/api/ae/import/scene-by-url", s.handleAEImportSceneByURL) + + // Settings endpoints + mux.HandleFunc("/api/settings/api-keys", s.handleAPISettingsKeys) // API mux.HandleFunc("/api/import/performer", s.handleAPIImportPerformer) @@ -106,6 +119,13 @@ func (s *Server) Start() error { return http.ListenAndServe(s.addr, mux) } +func (s *Server) render(w http.ResponseWriter, name string, data interface{}) { + if err := s.templates.ExecuteTemplate(w, name, data); err != nil { + log.Printf("template render error (%s): %v", name, err) + http.Error(w, fmt.Sprintf("template render error: %v", err), http.StatusInternalServerError) + } +} + // ============================================================================ // PAGE HANDLERS // ============================================================================ @@ -135,7 +155,7 @@ func (s *Server) handleDashboard(w http.ResponseWriter, r *http.Request) { "MovieCount": len(movies), } - 
s.templates.ExecuteTemplate(w, "dashboard.html", data) + s.render(w, "dashboard.html", data) } func (s *Server) handlePerformerList(w http.ResponseWriter, r *http.Request) { @@ -565,7 +585,7 @@ type APIResponse struct { Data interface{} `json:"data,omitempty"` } -const tpdbEnabled = false +const tpdbEnabled = true const tpdbDisabledMessage = "TPDB integration is disabled. Use the Adult Empire CLI commands to import and enrich data for now." func tpdbAPIKey() (string, error) { @@ -573,7 +593,7 @@ func tpdbAPIKey() (string, error) { return "", fmt.Errorf(tpdbDisabledMessage) } - apiKey := os.Getenv("TPDB_API_KEY") + apiKey := config.GetAPIKeys().TPDBAPIKey if apiKey == "" { return "", fmt.Errorf("TPDB_API_KEY not configured") } @@ -841,6 +861,213 @@ func (s *Server) handleAPISyncStatus(w http.ResponseWriter, r *http.Request) { }) } +// ============================================================================ +// Adult Empire API (search + scrape) +// ============================================================================ + +func (s *Server) handleAEImportPerformer(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + var req struct { + Query string `json:"query"` + } + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil || strings.TrimSpace(req.Query) == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "query is required"}) + return + } + + scraper, err := adultemp.NewScraper() + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("scraper init failed: %v", err)}) + return + } + + results, err := scraper.SearchPerformersByName(r.Context(), req.Query) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("search failed: %v", err)}) + return + } + + if len(results) == 0 { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "No performers found on Adult Empire"}) + return + } + + top := results[0] + data, err := scraper.ScrapePerformerByURL(r.Context(), top.URL) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("scrape failed: %v", err)}) + return + } + + performer := scraper.ConvertPerformerToModel(data) + + store := db.NewPerformerStore(s.db) + if err := store.Create(performer); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("save failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported %s from Adult Empire", performer.Name), + Data: map[string]interface{}{ + "name": performer.Name, + "url": top.URL, + }, + }) +} + +func (s *Server) handleAEImportPerformerByURL(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + var req struct { + URL string `json:"url"` + } + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil || strings.TrimSpace(req.URL) == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "url is required"}) + return + } + + scraper, err := adultemp.NewScraper() + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("scraper init failed: %v", err)}) + return + } + + data, err := scraper.ScrapePerformerByURL(r.Context(), req.URL) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: 
fmt.Sprintf("scrape failed: %v", err)}) + return + } + + performer := scraper.ConvertPerformerToModel(data) + store := db.NewPerformerStore(s.db) + if err := store.Create(performer); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("save failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported %s from Adult Empire", performer.Name), + Data: map[string]interface{}{ + "name": performer.Name, + "url": req.URL, + }, + }) +} + +func (s *Server) handleAEImportScene(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + var req struct { + Query string `json:"query"` + } + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil || strings.TrimSpace(req.Query) == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "query is required"}) + return + } + + scraper, err := adultemp.NewScraper() + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("scraper init failed: %v", err)}) + return + } + + results, err := scraper.SearchScenesByName(r.Context(), req.Query) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("search failed: %v", err)}) + return + } + + if len(results) == 0 { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "No scenes found on Adult Empire"}) + return + } + + top := results[0] + data, err := scraper.ScrapeSceneByURL(r.Context(), top.URL) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("scrape failed: %v", err)}) + return + } + + scene := scraper.ConvertSceneToModel(data) + sceneStore := db.NewSceneStore(s.db) + + if err := sceneStore.Create(scene); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("save failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported scene: %s", scene.Title), + Data: map[string]interface{}{ + "title": scene.Title, + "url": top.URL, + }, + }) +} + +func (s *Server) handleAEImportSceneByURL(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + var req struct { + URL string `json:"url"` + } + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil || strings.TrimSpace(req.URL) == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "url is required"}) + return + } + + scraper, err := adultemp.NewScraper() + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("scraper init failed: %v", err)}) + return + } + + data, err := scraper.ScrapeSceneByURL(r.Context(), req.URL) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("scrape failed: %v", err)}) + return + } + + scene := scraper.ConvertSceneToModel(data) + sceneStore := db.NewSceneStore(s.db) + + if err := sceneStore.Create(scene); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("save failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported scene: %s", scene.Title), + Data: map[string]interface{}{ + "title": scene.Title, + "url": req.URL, + }, + }) +} + // 
============================================================================ // BULK IMPORT ENDPOINTS + SSE PROGRESS // ============================================================================ @@ -1128,3 +1355,75 @@ func (s *Server) handleAPIGlobalSearch(w http.ResponseWriter, r *http.Request) { Data: results, }) } + +// ============================================================================ +// SETTINGS +// ============================================================================ + +func (s *Server) handleSettingsPage(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/settings" { + http.NotFound(w, r) + return + } + + data := map[string]interface{}{ + "PageTitle": "Settings", + "ActivePage": "settings", + } + + s.render(w, "settings.html", data) +} + +type apiKeysPayload struct { + TPDBAPIKey string `json:"tpdb_api_key"` + AEAPIKey string `json:"ae_api_key"` + StashDBAPIKey string `json:"stashdb_api_key"` + StashDBEndpoint string `json:"stashdb_endpoint"` +} + +func (s *Server) handleAPISettingsKeys(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodGet: + keys := config.GetAPIKeys() + resp := map[string]interface{}{ + "tpdbConfigured": keys.TPDBAPIKey != "", + "aeConfigured": keys.AEAPIKey != "", + "stashdbConfigured": keys.StashDBAPIKey != "", + "stashdbEndpoint": keys.StashDBEndpoint, + "tpdb_api_key": keys.TPDBAPIKey, // local-only UI; if you prefer, mask these + "ae_api_key": keys.AEAPIKey, + "stashdb_api_key": keys.StashDBAPIKey, + "stashdb_endpoint": keys.StashDBEndpoint, // duplicate for UI convenience + } + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: "OK", + Data: resp, + }) + case http.MethodPost: + var payload apiKeysPayload + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "Invalid JSON"}) + return + } + + keys := config.APIKeys{ + TPDBAPIKey: payload.TPDBAPIKey, + AEAPIKey: payload.AEAPIKey, + StashDBAPIKey: payload.StashDBAPIKey, + StashDBEndpoint: payload.StashDBEndpoint, + } + + if err := config.SaveAPIKeys(keys); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Failed to save keys: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: "API keys saved", + }) + default: + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + } +} diff --git a/internal/web/static/css/style.css b/internal/web/static/css/style.css index 9b25d25..9d6c2fb 100644 --- a/internal/web/static/css/style.css +++ b/internal/web/static/css/style.css @@ -524,6 +524,22 @@ main.container { color: var(--color-text-secondary); } +.status-banner { + display: none; + padding: 0.75rem 1rem; + margin-top: 1rem; + border-radius: 8px; + border: 1px solid var(--color-border); + background: var(--color-bg-elevated); + color: var(--color-text-primary); + font-size: 0.95rem; +} + +.status-banner.error { + border-color: #ff8a8a; + color: #ff8a8a; +} + /* Detail views */ .breadcrumb { margin-bottom: 1.5rem; diff --git a/internal/web/static/img/logo/GOONDEX_Titty.svg b/internal/web/static/img/logo/GOONDEX_Titty.svg new file mode 100644 index 0000000..2120d1b --- /dev/null +++ b/internal/web/static/img/logo/GOONDEX_Titty.svg @@ -0,0 +1,91 @@ + + + + + + + + + + + + + + + + + + diff --git a/internal/web/static/img/logo/GOONDEX_logo.png b/internal/web/static/img/logo/GOONDEX_logo.png deleted file mode 100644 index c9b1a40..0000000 Binary files 
a/internal/web/static/img/logo/GOONDEX_logo.png and /dev/null differ diff --git a/internal/web/static/img/logo/GOONDEX_logo.svg b/internal/web/static/img/logo/GOONDEX_logo.svg index be57e65..e494e72 100644 --- a/internal/web/static/img/logo/GOONDEX_logo.svg +++ b/internal/web/static/img/logo/GOONDEX_logo.svg @@ -23,16 +23,16 @@ inkscape:pagecheckerboard="0" inkscape:deskcolor="#505050" inkscape:document-units="px" - inkscape:zoom="2.1896304" - inkscape:cx="884.39583" - inkscape:cy="34.252356" + inkscape:zoom="2.8284271" + inkscape:cx="1382.9241" + inkscape:cy="89.095455" inkscape:window-width="1920" inkscape:window-height="1011" inkscape:window-x="0" inkscape:window-y="0" inkscape:window-maximized="1" inkscape:current-layer="g9" - showgrid="false"> + showgrid="true"> + bleed="0" + inkscape:export-filename="Goondex_LOGO2.png" + inkscape:export-xdpi="96" + inkscape:export-ydpi="96" /> + visible="true" /> + + @@ -139,14 +158,30 @@ id="path13" style="font-size:86.3973px;font-family:'Gmarket Sans';-inkscape-font-specification:'Gmarket Sans, Normal';stroke-width:7.85855;fill:#ff5fa2;fill-opacity:1" d="M 173.33789 50.472656 C 150.42237 50.472656 133.54297 67.165118 133.54297 89.986328 C 133.54297 112.80755 150.51808 129.49805 173.43359 129.49805 C 184.72527 129.49805 194.54994 125.44543 201.61328 118.60352 C 208.69985 125.44543 218.55013 129.49805 229.8418 129.49805 C 252.75733 129.49805 269.63672 112.80755 269.63672 89.986328 C 269.63672 67.165118 252.66358 50.472656 229.74805 50.472656 C 218.45638 50.472656 208.62975 54.525269 201.56641 61.367188 C 194.47983 54.525267 184.62956 50.472656 173.33789 50.472656 z M 173.33789 59.525391 C 182.39788 59.525391 190.21021 63.008763 195.58789 68.896484 C 198.21228 71.769779 200.25728 75.215888 201.58398 79.109375 C 202.9042 75.210676 204.94121 71.760371 207.55859 68.884766 C 212.91125 63.004031 220.69398 59.525391 229.74805 59.525391 C 247.00542 59.525391 259.73633 72.163146 259.73633 89.986328 C 259.73633 107.71522 247.00486 120.44531 229.8418 120.44531 C 220.78934 120.44531 212.98272 116.94263 207.60547 111.05273 C 204.97114 108.16726 202.91866 104.70856 201.58984 100.80859 C 200.2638 104.70381 198.21994 108.15962 195.5957 111.04297 C 190.22932 116.93923 182.44196 120.44531 173.43359 120.44531 C 156.17623 120.44531 143.44531 107.71522 143.44531 89.986328 C 143.44531 72.163146 156.08053 59.525391 173.33789 59.525391 z M 172.58594 100.67578 C 170.48224 100.6759 168.77728 102.38262 168.7793 104.48633 C 168.77939 106.58856 170.48372 108.2909 172.58594 108.29102 C 174.68815 108.2909 176.39244 106.58856 176.39258 104.48633 C 176.39458 102.38262 174.68964 100.6759 172.58594 100.67578 z M 229.05078 100.67578 C 226.94706 100.6759 225.24216 102.38262 225.24414 104.48633 C 225.24427 106.58856 226.94852 108.2909 229.05078 108.29102 C 231.153 108.2909 232.8573 106.58856 232.85742 104.48633 C 232.85942 102.38262 231.15448 100.6759 229.05078 100.67578 z " /> + + + + + + d="m 1463.5643,67.636337 c -2.6247,1.49e-4 -4.7519,2.129552 -4.7493,4.754267 10e-5,2.62286 2.1265,4.74679 4.7493,4.74695 2.623,-1.6e-4 4.7491,-2.12409 4.7495,-4.74695 0,-2.624715 -2.1248,-4.754118 -4.7495,-4.754267 z m 70.4489,0 c -2.6247,1.49e-4 -4.7518,2.129552 -4.7495,4.754267 2e-4,2.62286 2.1265,4.74679 4.7495,4.74695 2.6228,-1.6e-4 4.7492,-2.12409 4.7493,-4.74695 0,-2.624715 -2.1246,-4.754118 -4.7493,-4.754267 z" + style="font-size:86.3973px;font-family:'Gmarket Sans';-inkscape-font-specification:'Gmarket Sans, Normal';fill:#8a6f91;fill-opacity:1;stroke-width:9.80478" + id="path1-8" /> diff --git 
a/internal/web/static/js/app.js b/internal/web/static/js/app.js index 4f50e48..5501ea1 100644 --- a/internal/web/static/js/app.js +++ b/internal/web/static/js/app.js @@ -275,6 +275,106 @@ function copyToClipboard(text) { }); } +// ============================================================================ +// Adult Empire UI helpers +// ============================================================================ + +function setAEStatus(msg, isError = false) { + const el = document.getElementById('ae-status'); + if (!el) return; + el.textContent = msg; + el.classList.toggle('error', !!isError); + el.style.display = msg ? 'block' : 'none'; +} + +async function aeImportPerformerByName() { + const name = prompt('Import performer by name (Adult Empire):'); + if (!name) return; + setAEStatus(`Searching Adult Empire for "${name}"...`); + try { + const res = await fetch('/api/ae/import/performer', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: name }) + }); + const result = await res.json(); + if (result.success) { + setAEStatus(result.message); + setTimeout(() => location.reload(), 1500); + } else { + setAEStatus(result.message || 'Import failed', true); + } + } catch (err) { + setAEStatus(`Error: ${err.message}`, true); + } +} + +async function aeImportPerformerByURL() { + const url = prompt('Paste Adult Empire performer URL:'); + if (!url) return; + setAEStatus('Importing performer from Adult Empire URL...'); + try { + const res = await fetch('/api/ae/import/performer-by-url', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ url }) + }); + const result = await res.json(); + if (result.success) { + setAEStatus(result.message); + setTimeout(() => location.reload(), 1500); + } else { + setAEStatus(result.message || 'Import failed', true); + } + } catch (err) { + setAEStatus(`Error: ${err.message}`, true); + } +} + +async function aeImportSceneByName() { + const title = prompt('Import scene by title (Adult Empire):'); + if (!title) return; + setAEStatus(`Searching Adult Empire for "${title}"...`); + try { + const res = await fetch('/api/ae/import/scene', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: title }) + }); + const result = await res.json(); + if (result.success) { + setAEStatus(result.message); + setTimeout(() => location.reload(), 1500); + } else { + setAEStatus(result.message || 'Import failed', true); + } + } catch (err) { + setAEStatus(`Error: ${err.message}`, true); + } +} + +async function aeImportSceneByURL() { + const url = prompt('Paste Adult Empire scene URL:'); + if (!url) return; + setAEStatus('Importing scene from Adult Empire URL...'); + try { + const res = await fetch('/api/ae/import/scene-by-url', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ url }) + }); + const result = await res.json(); + if (result.success) { + setAEStatus(result.message); + setTimeout(() => location.reload(), 1500); + } else { + setAEStatus(result.message || 'Import failed', true); + } + } catch (err) { + setAEStatus(`Error: ${err.message}`, true); + } +} + function applyFilters() { // Hook for your search/filter logic console.log("Applying filters…"); @@ -396,7 +496,8 @@ async function importScene() { } async function syncAll() { - const force = document.getElementById('sync-force').checked; + const forceEl = document.getElementById('sync-force'); + const force = forceEl ? 
forceEl.checked : false; setImportStatus('sync', 'Syncing all data from TPDB...', false); try { diff --git a/internal/web/templates/dashboard.html b/internal/web/templates/dashboard.html index b743155..2d2d60e 100644 --- a/internal/web/templates/dashboard.html +++ b/internal/web/templates/dashboard.html @@ -93,16 +93,16 @@

[dashboard.html template markup omitted; visible text changes only]
   Welcome to Goondex
-  Adult Empire-first indexer (TPDB temporarily disabled)
+  TPDB bulk imports with Adult Empire enrichment
@@ -126,8 +126,8 @@
   View all →
@@ -142,10 +142,6 @@
   View all →
@@ -158,8 +154,8 @@
   View all →
@@ -178,39 +174,71 @@
-  Import from Adult Empire (CLI)
+  TPDB Import & Sync
-  TPDB bulk import is temporarily disabled. Use the Adult Empire CLI to seed your library with high-quality mainstream data:
+  Run bulk imports from TPDB, then enrich with AE/StashDB. Keep it running to build a complete base.
+  Adult Empire Imports
+  Import directly from Adult Empire via the UI with built-in progress feedback.
-  Movies: scraper not finished yet. Use scene/performer imports for now.
+  Movies: scraper not finished yet. Use performer/scene imports for now.
-  Bulk “Import All” buttons will stay disabled until an Adult Empire bulk flow is available.

+
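Note (not part of the diff): a minimal smoke-test sketch for the new settings endpoint added in internal/web/server.go. It assumes the web UI is running via ADDR=localhost:8788 scripts/run.sh; the JSON field names mirror apiKeysPayload, and a successful POST persists the keys to config/api_keys.json (now gitignored).

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	base := "http://localhost:8788" // assumption: the ADDR used with scripts/run.sh

	// Save keys. Leaving stashdb_endpoint empty lets the server fall back to
	// https://stashdb.org/graphql (see config.SaveAPIKeys).
	payload, _ := json.Marshal(map[string]string{
		"tpdb_api_key":     "your-api-key-here",
		"ae_api_key":       "your-api-key-here",
		"stashdb_api_key":  "your-api-key-here",
		"stashdb_endpoint": "",
	})
	resp, err := http.Post(base+"/api/settings/api-keys", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	saved, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println("POST:", string(saved)) // handler replies with an APIResponse JSON object

	// Read the configuration back; the GET response includes the
	// tpdbConfigured / aeConfigured / stashdbConfigured booleans.
	resp, err = http.Get(base + "/api/settings/api-keys")
	if err != nil {
		panic(err)
	}
	status, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println("GET:", string(status))
}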
diff --git a/internal/web/templates/layout.html b/internal/web/templates/layout.html index 3ac57a9..fd44c32 100644 --- a/internal/web/templates/layout.html +++ b/internal/web/templates/layout.html @@ -25,7 +25,7 @@