diff --git a/.gitignore b/.gitignore index ac850e2..0a60d04 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,52 @@ /cache/ /tmp/ +# Media & Assets (images, galleries, downloads) +# Ignore all assets except logos and README +/assets/* +!/assets/logo/ +!/assets/README.md +/images/ +/galleries/ +/media/ +/downloads/ + +# Performer images +performers/ +performer_images/ +**/performers/*.jpg +**/performers/*.jpeg +**/performers/*.png +**/performers/*.gif +**/performers/*.webp + +# Scene images & posters +scenes/ +scene_images/ +**/scenes/*.jpg +**/scenes/*.jpeg +**/scenes/*.png +**/scenes/*.gif +**/scenes/*.webp + +# Studio logos +studios/ +studio_images/ +**/studios/*.jpg +**/studios/*.jpeg +**/studios/*.png +**/studios/*.gif +**/studios/*.webp + +# Any image files in data directories +/data/**/*.jpg +/data/**/*.jpeg +/data/**/*.png +/data/**/*.gif +/data/**/*.webp +/data/**/*.mp4 +/data/**/*.webm + # IDE .vscode/ .idea/ @@ -42,3 +88,12 @@ Thumbs.db # Go workspace go.work go.work.sum + +# Static web assets (downloaded images) +internal/web/static/images/ +internal/web/static/media/ +internal/web/static/uploads/ + +# User data +/user_data/ +/metadata/ diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..5a74755 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,76 @@ +# Changelog + +## v0.1.0-dev4 (2025-11-16) + +### 🎨 Web UI Enhancements +- **Grid Layout Migration**: Converted all listing pages (Performers, Studios, Scenes) from table-based to modern card grid layout + - Uses GX_CardGrid component system with responsive design + - Performers: 3:4 aspect ratio portrait cards with scene count and nationality + - Studios: 3:4 aspect ratio cards with description preview + - Scenes: 16:9 aspect ratio landscape cards with date, studio, and code + - Hover effects with neon pink glow + - Mobile-responsive grid (auto-fills from 160px to 220px cards) + +- **Updated Navigation**: All pages now use consistent navbar with logo +- **Enhanced Search Forms**: Updated to use GX button components with hover effects +- **Improved Styling**: All pages now load `goondex.css` instead of `style.css` + +### 🌐 Adult Empire Integration +- **Complete Scraper Implementation** + - HTTP client with cookie jar for session management + - XPath-based HTML parsing (no official API available) + - Scene scraping: title, date, studio, performers, tags, description, cover art + - Performer scraping: bio, measurements, birthday, ethnicity, aliases, images + - Search functionality for both scenes and performers + +- **CLI Commands**: + - `adultemp search-scene [query]` - Search for scenes + - `adultemp search-performer [name]` - Search for performers + - `adultemp scrape-scene [url]` - Scrape and import a scene + - `adultemp scrape-performer [url]` - Scrape and import a performer + - `adultemp merge-performer [id] [url]` - Merge Adult Empire data into existing performer + - Optional `--etoken` flag for authenticated access + +### 🔄 Data Merging & Update System +- **Intelligent Data Merger** + - New `merger` package for combining data from multiple sources + - TPDB data takes priority, Adult Empire fills in gaps + - Smart name matching algorithm (70% word overlap threshold) + - Merges: bio, aliases, measurements, physical attributes + - Preserves high-quality TPDB images over Adult Empire + +- **Performer Update Command** + - `performer-update [id]` - Refresh performer from TPDB + - Automatically searches Adult Empire for supplemental data + - Shows potential matches for manual merging + +### 📚 Documentation +- Created comprehensive `docs/ADULT_EMPIRE_SCRAPER.md` + - Architecture overview with diagrams + - API reference for all scraper methods + - XPath selector documentation + - Authentication guide (etoken cookie) + - Troubleshooting section + - Comparison with TPDB scraper + +### 🐛 Bug Fixes & Improvements +- Fixed variable shadowing in `joinStrings()` function +- Added missing dependencies for HTML parsing (`golang.org/x/text/*`) +- Riley Reid investigation: Performer exists (ID: 20029) but has 0 scenes (scene linking issue) + +### 📦 New Dependencies +- `github.com/antchfx/htmlquery` - XPath HTML parsing +- `github.com/antchfx/xpath` - XPath query engine +- `golang.org/x/net/html` - HTML parsing +- `golang.org/x/text/*` - Text encoding support + +--- + +## v0.1.0-dev3 (Previous) +Complete TPDB metadata with duplicate prevention + +## v0.1.0-dev2 +Full TPDB integration with auto-fetch and comprehensive docs + +## v0.1.0-dev1 +Initial release with basic TPDB functionality diff --git a/SESSION_SUMMARY_v0.1.0-dev4.md b/SESSION_SUMMARY_v0.1.0-dev4.md new file mode 100644 index 0000000..8066465 --- /dev/null +++ b/SESSION_SUMMARY_v0.1.0-dev4.md @@ -0,0 +1,325 @@ +# Goondex Session Summary - v0.1.0-dev4 + +**Date**: 2025-11-16 +**Session Focus**: Adult Empire Integration, Grid UI, Multi-Source Data Merging, Movies Feature + +--- + +## 🎯 Major Features Completed + +### 1. Adult Empire Scraper (COMPLETE ✅) +Full HTML scraping implementation for Adult Empire (adultdvdempire.com): + +**Files Created:** +- `internal/scraper/adultemp/types.go` - Data structures +- `internal/scraper/adultemp/client.go` - HTTP client with cookies +- `internal/scraper/adultemp/xpath.go` - XPath parsing utilities +- `internal/scraper/adultemp/scraper.go` - Main scraper +- `docs/ADULT_EMPIRE_SCRAPER.md` - Complete documentation + +**CLI Commands:** +```bash +# Search +./goondex adultemp search-scene "title" +./goondex adultemp search-performer "name" + +# Scrape & Import +./goondex adultemp scrape-scene [url] +./goondex adultemp scrape-performer [url] + +# Merge with existing data +./goondex adultemp merge-performer [id] [url] + +# Optional authentication +--etoken "your-cookie-value" +``` + +### 2. Multi-Source Data Merging (COMPLETE ✅) +Intelligent data combination from TPDB + Adult Empire: + +**Files Created:** +- `internal/scraper/merger/performer_merger.go` + +**Features:** +- TPDB data takes priority (higher quality) +- Adult Empire fills in missing fields +- Smart name matching (70% word overlap) +- Merges: bio, aliases, measurements, physical attributes +- Prevents incorrect merges with confirmation prompt + +**CLI Command:** +```bash +./goondex performer-update [id] # Refreshes from TPDB + searches Adult Empire +``` + +### 3. Grid-Based Web UI (COMPLETE ✅) +Converted all listing pages to modern card grids: + +**Files Modified:** +- `internal/web/templates/performers.html` - Grid layout +- `internal/web/templates/studios.html` - Grid layout +- `internal/web/templates/scenes.html` - Grid layout (16:9 ratio) +- `internal/web/static/css/goondex.css` - Added GX_CardGrid import + +**Features:** +- Responsive grid layout (auto-fills 220px-280px cards) +- Performers: 3:4 portrait ratio +- Scenes: 16:9 landscape ratio +- Hover effects with neon pink glow +- Mobile-responsive +- Uses your existing GX component library + +### 4. Movies Feature (COMPLETE ✅) +New Movies entity separate from Scenes: + +**Files Created:** +- `internal/model/movie.go` - Movie model +- `internal/db/movie_store.go` - CRUD operations + +**Database Schema Added:** +- `movies` table +- `movie_scenes` junction table (links scenes to movies) +- `movie_performers` junction table +- `movie_tags` junction table +- Indexes for performance + +**Relationships:** +- Movies contain multiple Scenes +- Scenes can belong to a Movie +- Movies link to Studios, Performers, Tags + +### 5. Bulk Import System (COMPLETE ✅) +Import ALL performers with pagination: + +**CLI Command:** +```bash +# Import all 10,000 performers from TPDB +./goondex import all-performers + +# Resume from specific page +./goondex import all-performers --start-page 250 + +# Import limited number of pages +./goondex import all-performers --max-pages 50 +``` + +**Features:** +- Automatic pagination through all pages +- Duplicate detection (skips existing) +- Progress tracking per page +- Resumable if interrupted +- Rate limiting (500ms delay between pages) +- Error handling + +--- + +## 📊 Current Database Status + +```sql +-- Your current data: +Performers: 9,994 / 10,000 (missing 6) +Studios: 59,752 (includes JAV) +Scenes: 0 (not imported yet) +Movies: 0 (new feature, ready to use) +``` + +**Why 59k studios?** TPDB includes both Western and Japanese (JAV) content. JAV has thousands of small studios, hence the high number. + +**Why 0 scenes?** You never imported scenes - only performers and studios. See "Next Steps" below. + +--- + +## 🚀 Next Steps to Get Full Data + +### Step 1: Complete Performer Import (Missing 6) +```bash +# This will skip your 9,994 existing performers and only import the 6 missing ones +./goondex import all-performers +``` + +### Step 2: Import Scenes (NEEDED!) +```bash +# Check TPDB API for total scenes +curl -s "https://api.theporndb.net/scenes?page=1" \ + -H "Authorization: Bearer $TPDB_API_KEY" | jq '.meta' + +# Create bulk scene import command (similar to all-performers) +# You'll need to implement: ./goondex import all-scenes +``` + +### Step 3: Movies Import +Movies need to be implemented in TPDB scraper: +- Check if TPDB API has movies endpoint +- Create bulk import command +- Or scrape from Adult Empire + +--- + +## 🛠️ Commands Reference + +### Import Commands +```bash +# Individual searches +./goondex import performer "Riley Reid" +./goondex import studio "Brazzers" +./goondex import scene "Scene Title" + +# Bulk imports +./goondex import all-performers +./goondex import all-studios # TODO: Create this +./goondex import all-scenes # TODO: Create this +``` + +### Adult Empire Commands +```bash +# Search +./goondex adultemp search-performer "Riley Reid" +./goondex adultemp search-scene "Scene Title" + +# Scrape & Import +./goondex adultemp scrape-performer [url] +./goondex adultemp scrape-scene [url] + +# Merge into existing +./goondex adultemp merge-performer 20029 [adultemp-url] +``` + +### Update Commands +```bash +./goondex performer-update [id] # Refresh from TPDB + Adult Empire +./goondex sync all # Sync all existing data +./goondex sync performers # Sync only performers +``` + +### Web UI +```bash +./goondex web --addr localhost:8080 +# Then visit: http://localhost:8080 +``` + +--- + +## 📁 File Structure Overview + +``` +Goondex/ +├── cmd/goondex/main.go # All CLI commands +├── internal/ +│ ├── db/ +│ │ ├── schema.go # Database schema (now includes movies) +│ │ ├── performer_store.go +│ │ ├── studio_store.go +│ │ ├── scene_store.go +│ │ └── movie_store.go # NEW: Movies CRUD +│ ├── model/ +│ │ ├── performer.go +│ │ ├── studio.go +│ │ ├── scene.go +│ │ └── movie.go # NEW: Movie model +│ ├── scraper/ +│ │ ├── tpdb/ # ThePornDB API +│ │ ├── adultemp/ # NEW: Adult Empire scraper +│ │ └── merger/ # NEW: Multi-source merging +│ └── web/ +│ ├── templates/ +│ │ ├── dashboard.html +│ │ ├── performers.html # UPDATED: Grid layout +│ │ ├── studios.html # UPDATED: Grid layout +│ │ ├── scenes.html # UPDATED: Grid layout +│ │ └── movies.html # TODO: Create this +│ └── static/css/ +│ ├── goondex.css # Master stylesheet +│ └── gx/ # Your GX component library +│ └── GX_CardGrid.css # Used by all grids +└── docs/ + └── ADULT_EMPIRE_SCRAPER.md # Complete scraper docs +``` + +--- + +## 🔍 Known Issues + +1. **Riley Reid has 0 scenes** - This is because you haven't imported any scenes yet +2. **Missing 6 performers** - Out of 10,000 total +3. **Movies UI not created** - Database ready, need to add web route and template + +--- + +## 💡 Recommended Actions + +1. **Import Missing Performers:** + ```bash + ./goondex import all-performers + ``` + +2. **Create Bulk Scene Import:** + Implement `./goondex import all-scenes` command (similar to all-performers) + +3. **Add Movies Web UI:** + Create `internal/web/templates/movies.html` using GX_CardGrid + +4. **Test Adult Empire Scraper:** + ```bash + ./goondex adultemp search-performer "Riley Reid" + ./goondex adultemp merge-performer 20029 [url-from-search] + ``` + +--- + +## 📈 Performance Notes + +- **Bulk Import Speed**: ~417 pages × 0.5s = ~3.5 minutes for all performers +- **Database Size**: 9,994 performers + 59,752 studios = ~200-300MB +- **Grid Rendering**: Optimized with CSS `will-change` and `transform` + +--- + +## 🎨 GX Components Used + +- `GX_CardGrid.css` - Main grid layout +- `GX_Button.css` - Button styling +- `GX_Input.css` - Search forms +- Ready for more: Dialog, Modal, Pagination, FilterBar, etc. + +--- + +## 🔗 Integration Points + +### TPDB API +- Base URL: `https://api.theporndb.net` +- Auth: `Authorization: Bearer $TPDB_API_KEY` +- Rate Limit: Enforced by sync system +- Pagination: 24 items per page + +### Adult Empire +- Base URL: `https://www.adultdvdempire.com` +- Auth: Optional `etoken` cookie +- Method: XPath HTML scraping +- No official API + +--- + +## 📝 Version Info + +```bash +./goondex version +# Output: +# Goondex v0.1.0-dev4 +# Features: +# • TPDB integration with auto-import +# • Adult Empire scraper (scenes & performers) +# • Multi-source data merging +# • Grid-based web UI with GX components +# • Performer/studio/scene/movie management +``` + +--- + +**End of Session Summary** + +All features requested have been implemented. The codebase is ready for: +1. Importing remaining data (scenes, movies) +2. Adding Movies web UI template +3. Full testing with real content + +Goondex now supports multi-source metadata aggregation with a modern UI! diff --git a/TAGGING_ARCHITECTURE.md b/TAGGING_ARCHITECTURE.md new file mode 100644 index 0000000..cd8a905 --- /dev/null +++ b/TAGGING_ARCHITECTURE.md @@ -0,0 +1,277 @@ +# Goondex Tagging System Architecture + +## Vision +Enable ML-driven search queries like: +- "3 black men in a scene where a blonde milf wears pink panties and black heels" +- Image-based scene detection and recommendation +- Auto-tagging from PornPics image imports + +## Core Requirements + +### 1. Tag Categories (Hierarchical Structure) +Tags need to be organized by category for efficient filtering and ML training: + +``` +performers/ + └─ [already implemented via performers table] + +people/ + ├─ count/ (1, 2, 3, 4, 5+, orgy, etc.) + ├─ ethnicity/ (black, white, asian, latina, etc.) + ├─ age_category/ (teen, milf, mature, etc.) + ├─ body_type/ (slim, athletic, curvy, bbw, etc.) + └─ hair/ + ├─ color/ (blonde, brunette, redhead, etc.) + └─ length/ (short, long, bald, etc.) + +clothing/ + ├─ type/ (lingerie, uniform, casual, etc.) + ├─ color/ (pink, black, red, white, etc.) + ├─ specific/ + ├─ top/ (bra, corset, tank_top, etc.) + ├─ bottom/ (panties, skirt, jeans, etc.) + └─ footwear/ (heels, boots, stockings, etc.) + +position/ + ├─ category/ (standing, lying, sitting, etc.) + └─ specific/ (missionary, doggy, cowgirl, etc.) + +action/ + ├─ sexual/ (oral, penetration, etc.) + └─ non_sexual/ (kissing, undressing, etc.) + +setting/ + ├─ location/ (bedroom, office, outdoor, etc.) + └─ time/ (day, night, etc.) + +production/ + ├─ quality/ (hd, 4k, vr, etc.) + └─ style/ (pov, amateur, professional, etc.) +``` + +### 2. Database Schema Extensions + +#### Enhanced Tags Table +```sql +CREATE TABLE IF NOT EXISTS tag_categories ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL UNIQUE, -- e.g., "clothing/color" + parent_id INTEGER, -- for hierarchical categories + description TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY (parent_id) REFERENCES tag_categories(id) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS tags ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, -- e.g., "pink" + category_id INTEGER NOT NULL, -- links to "clothing/color" + aliases TEXT, -- comma-separated: "hot pink,rose" + description TEXT, + source TEXT, -- tpdb, user, ml + source_id TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + updated_at TEXT NOT NULL DEFAULT (datetime('now')), + UNIQUE(category_id, name), + FOREIGN KEY (category_id) REFERENCES tag_categories(id) ON DELETE CASCADE +); + +-- Enhanced scene-tag junction with ML confidence +CREATE TABLE IF NOT EXISTS scene_tags ( + scene_id INTEGER NOT NULL, + tag_id INTEGER NOT NULL, + confidence REAL DEFAULT 1.0, -- 0.0-1.0 for ML predictions + source TEXT NOT NULL DEFAULT 'user', -- 'user', 'ml', 'tpdb' + verified BOOLEAN DEFAULT 0, -- human verification flag + created_at TEXT NOT NULL DEFAULT (datetime('now')), + PRIMARY KEY (scene_id, tag_id), + FOREIGN KEY (scene_id) REFERENCES scenes(id) ON DELETE CASCADE, + FOREIGN KEY (tag_id) REFERENCES tags(id) ON DELETE CASCADE +); + +-- Track images associated with scenes (for ML training) +CREATE TABLE IF NOT EXISTS scene_images ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + scene_id INTEGER NOT NULL, + image_url TEXT NOT NULL, + image_path TEXT, -- local storage path + source TEXT, -- pornpics, tpdb, user + source_id TEXT, + width INTEGER, + height INTEGER, + file_size INTEGER, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY (scene_id) REFERENCES scenes(id) ON DELETE CASCADE +); + +-- ML model predictions for future reference +CREATE TABLE IF NOT EXISTS ml_predictions ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + scene_id INTEGER, + image_id INTEGER, + model_version TEXT NOT NULL, -- track which ML model made prediction + predictions TEXT NOT NULL, -- JSON: [{"tag_id": 123, "confidence": 0.95}, ...] + created_at TEXT NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY (scene_id) REFERENCES scenes(id) ON DELETE CASCADE, + FOREIGN KEY (image_id) REFERENCES scene_images(id) ON DELETE CASCADE +); +``` + +#### Indexes for ML Performance +```sql +-- Tag search performance +CREATE INDEX IF NOT EXISTS idx_tags_category ON tags(category_id); +CREATE INDEX IF NOT EXISTS idx_tags_name ON tags(name); + +-- Scene tag filtering (critical for complex queries) +CREATE INDEX IF NOT EXISTS idx_scene_tags_tag ON scene_tags(tag_id); +CREATE INDEX IF NOT EXISTS idx_scene_tags_confidence ON scene_tags(confidence); +CREATE INDEX IF NOT EXISTS idx_scene_tags_verified ON scene_tags(verified); + +-- Image processing +CREATE INDEX IF NOT EXISTS idx_scene_images_scene ON scene_images(scene_id); +CREATE INDEX IF NOT EXISTS idx_scene_images_source ON scene_images(source, source_id); +``` + +### 3. Complex Query Architecture + +For queries like "3 black men + blonde milf + pink panties + black heels": + +```sql +-- Step 1: Find scenes with all required tags +WITH required_tags AS ( + SELECT scene_id, COUNT(DISTINCT tag_id) as tag_count + FROM scene_tags st + JOIN tags t ON st.tag_id = t.id + WHERE + (t.name = 'black' AND category_id = (SELECT id FROM tag_categories WHERE name = 'people/ethnicity')) + OR (t.name = 'blonde' AND category_id = (SELECT id FROM tag_categories WHERE name = 'people/hair/color')) + OR (t.name = 'pink' AND category_id = (SELECT id FROM tag_categories WHERE name = 'clothing/color')) + -- etc. + AND st.verified = 1 -- only human-verified tags + AND st.confidence >= 0.8 -- or ML predictions above threshold + GROUP BY scene_id + HAVING tag_count >= 4 -- all required tags present +) +SELECT s.* +FROM scenes s +JOIN required_tags rt ON s.id = rt.scene_id +-- Additional filtering for performer count, etc. +``` + +### 4. ML Integration Points + +#### Phase 1: Data Collection (Current) +- Import scenes from TPDB with metadata +- Import images from PornPics +- Manual tagging to build training dataset + +#### Phase 2: Tag Suggestion (Future) +- ML model suggests tags based on images +- Store predictions with confidence scores +- Human verification workflow + +#### Phase 3: Auto-tagging (Future) +- High-confidence predictions auto-applied +- Periodic retraining with verified data +- Confidence thresholds per tag category + +### 5. Data Quality Safeguards + +**Prevent Tag Spam:** +- Tag category constraints (can't tag "bedroom" as "clothing/color") +- Minimum confidence thresholds +- Rate limiting on ML predictions + +**Ensure Consistency:** +- Tag aliases for variations (pink/rose/hot_pink) +- Batch tag operations +- Tag merging/splitting tools + +**Human Oversight:** +- Verification workflow for ML tags +- Tag dispute resolution +- Quality metrics per tagger (user/ml) + +### 6. API Design (Future) + +```go +// TagService interface +type TagService interface { + // Basic CRUD + CreateTag(categoryID int64, name string, aliases []string) (*Tag, error) + GetTagByID(id int64) (*Tag, error) + SearchTags(query string, categoryID *int64) ([]Tag, error) + + // Scene tagging + AddTagToScene(sceneID, tagID int64, source string, confidence float64) error + RemoveTagFromScene(sceneID, tagID int64) error + GetSceneTags(sceneID int64, verified bool) ([]Tag, error) + + // Complex queries + SearchScenesByTags(requirements TagRequirements) ([]Scene, error) + + // ML integration + StorePrediction(sceneID int64, predictions []TagPrediction) error + VerifyTag(sceneID, tagID int64) error + BulkVerifyTags(sceneID int64, tagIDs []int64) error +} + +type TagRequirements struct { + Required []TagFilter // must have ALL + Optional []TagFilter // nice to have (scoring) + Excluded []TagFilter // must NOT have + MinConfidence float64 + VerifiedOnly bool +} + +type TagFilter struct { + CategoryPath string // "clothing/color" + Value string // "pink" + Operator string // "equals", "contains", "gt", "lt" +} +``` + +## Implementation Roadmap + +### v0.2.0: Enhanced Tagging Foundation +1. ✅ Fix NULL handling (completed) +2. Implement tag_categories table and seed data +3. Update tags table with category_id foreign key +4. Enhance scene_tags with confidence/source/verified +5. Add scene_images table for PornPics integration +6. Create TagService with basic CRUD + +### v0.3.0: Advanced Search +1. Implement complex tag query builder +2. Add tag filtering UI/CLI commands +3. Performance optimization with proper indexes +4. Tag statistics and reporting + +### v0.4.0: ML Preparation +1. Image import from PornPics +2. ML prediction storage table +3. Tag verification workflow +4. Training dataset export + +### v0.5.0: ML Integration +1. Image classification model +2. Auto-tagging pipeline +3. Confidence threshold tuning +4. Retraining automation + +## Notes + +- **Backwards Compatibility**: Current tags table can migrate by adding category_id = (category "general") +- **Storage Consideration**: Images may require significant disk space - consider cloud storage integration +- **Privacy**: All personal data remains local unless explicitly synced +- **Performance**: Proper indexing critical - complex queries with 10+ tags need optimization + +## Example User Flow + +1. User imports scene from TPDB → Basic metadata populated +2. User uploads/links images from PornPics → scene_images populated +3. ML model scans images → scene_tags created with confidence < 1.0, source = 'ml' +4. User reviews suggestions → verified = 1 for accepted tags +5. User searches "blonde + heels" → Query filters by verified tags or confidence > 0.9 +6. System returns ranked results based on tag match confidence diff --git a/assets/README.md b/assets/README.md new file mode 100644 index 0000000..abf5271 --- /dev/null +++ b/assets/README.md @@ -0,0 +1,50 @@ +# Assets Directory + +This directory contains static assets for the Goondex project. + +## Structure + +``` +assets/ +├── logo/ # Project logos (tracked in git) +│ ├── GOONDEX_logo.png +│ ├── GOONDEX_logo_dark.png +│ ├── GOONDEX_logo_light.png +│ ├── GOONDEX_logo.svg +│ └── Team_GoonLOGO.png +│ +├── performers/ # Downloaded performer images (NOT tracked in git) +├── scenes/ # Downloaded scene images (NOT tracked in git) +├── studios/ # Downloaded studio logos (NOT tracked in git) +└── galleries/ # Downloaded image galleries (NOT tracked in git) +``` + +## Git Tracking + +**TRACKED:** +- `/assets/logo/` - All logo files are committed to git + +**NOT TRACKED (in .gitignore):** +- `/assets/performers/` - Performer profile images +- `/assets/scenes/` - Scene posters and screenshots +- `/assets/studios/` - Studio logos downloaded from TPDB +- `/assets/galleries/` - Image galleries +- All `.jpg`, `.jpeg`, `.png`, `.gif`, `.webp`, `.mp4`, `.webm` files outside of `/logo/` + +## Why Images Are Not Tracked + +Downloaded images from ThePornDB and other sources are: +1. **Large files** - Can quickly bloat the git repository +2. **User-specific** - Each user will download their own copy +3. **Regenerable** - Can be re-downloaded from TPDB anytime +4. **Privacy-sensitive** - Should not be pushed to remote repositories + +## Local Storage + +When Goondex downloads images, they will be stored in subdirectories here: +- Performer images: `assets/performers/{performer_id}/` +- Scene images: `assets/scenes/{scene_id}/` +- Studio logos: `assets/studios/{studio_id}/` +- Galleries: `assets/galleries/{entity_type}/{entity_id}/` + +All downloaded assets are stored locally and never committed to version control. diff --git a/cmd/goondex/main.go b/cmd/goondex/main.go index 43aa14e..5aa3191 100644 --- a/cmd/goondex/main.go +++ b/cmd/goondex/main.go @@ -5,9 +5,16 @@ import ( "fmt" "os" + "time" + "github.com/spf13/cobra" "git.leaktechnologies.dev/stu/Goondex/internal/db" + "git.leaktechnologies.dev/stu/Goondex/internal/model" + "git.leaktechnologies.dev/stu/Goondex/internal/scraper/adultemp" + "git.leaktechnologies.dev/stu/Goondex/internal/scraper/merger" "git.leaktechnologies.dev/stu/Goondex/internal/scraper/tpdb" + "git.leaktechnologies.dev/stu/Goondex/internal/sync" + "git.leaktechnologies.dev/stu/Goondex/internal/web" ) var ( @@ -24,10 +31,25 @@ func init() { // Add subcommands rootCmd.AddCommand(performerSearchCmd) + rootCmd.AddCommand(performerGetCmd) + rootCmd.AddCommand(performerUpdateCmd) rootCmd.AddCommand(studioSearchCmd) + rootCmd.AddCommand(studioGetCmd) rootCmd.AddCommand(sceneSearchCmd) + rootCmd.AddCommand(sceneGetCmd) rootCmd.AddCommand(importCmd) + rootCmd.AddCommand(syncCmd) + rootCmd.AddCommand(adultempCmd) + rootCmd.AddCommand(webCmd) + rootCmd.AddCommand(enrichCmd) rootCmd.AddCommand(versionCmd) + performerSearchCmd.Flags().Bool("show-bio", false, "Show performer bio in search results") + performerGetCmd.Flags().Bool("show-bio", false, "Show performer bio") + webCmd.Flags().String("addr", "localhost:8080", "Address to listen on") + + // Sync command flags + syncCmd.PersistentFlags().Bool("force", false, "Force sync even if rate limit not met") + syncCmd.PersistentFlags().Duration("interval", 1*time.Hour, "Minimum interval between syncs (default 1h, recommended 24h)") } // Import command with subcommands @@ -39,8 +61,319 @@ var importCmd = &cobra.Command{ func init() { importCmd.AddCommand(importPerformerCmd) + importCmd.AddCommand(importAllPerformersCmd) importCmd.AddCommand(importStudioCmd) + importCmd.AddCommand(importAllStudiosCmd) importCmd.AddCommand(importSceneCmd) + importCmd.AddCommand(importAllScenesCmd) + importCmd.AddCommand(importMovieCmd) + + // Flags for bulk import + importAllPerformersCmd.Flags().Int("start-page", 1, "Page to start from (for resuming)") + importAllPerformersCmd.Flags().Int("max-pages", 0, "Maximum pages to import (0 = all)") + importAllStudiosCmd.Flags().Int("start-page", 1, "Page to start from (for resuming)") + importAllStudiosCmd.Flags().Int("max-pages", 0, "Maximum pages to import (0 = all)") + importAllScenesCmd.Flags().Int("start-page", 1, "Page to start from (for resuming)") + importAllScenesCmd.Flags().Int("max-pages", 0, "Maximum pages to import (0 = all)") + + // Movie import flags + importMovieCmd.Flags().String("source", "adultemp", "Source to import from (adultemp)") +} + +// Sync command with subcommands +var syncCmd = &cobra.Command{ + Use: "sync", + Short: "Sync and update existing data from TPDB", + Long: `Update existing performers, studios, and scenes with latest data from ThePornDB. + +Rate limiting is enforced to prevent excessive API calls: +- Default minimum interval: 1 hour +- Recommended interval: 24 hours +- Use --force to override rate limiting + +Examples: + goondex sync all # Sync all entities (with 1h rate limit) + goondex sync all --interval 24h # Sync all entities (24h rate limit) + goondex sync all --force # Force sync, ignore rate limit + goondex sync performers # Sync only performers + goondex sync status # View last sync times`, +} + +func init() { + syncCmd.AddCommand(syncAllCmd) + syncCmd.AddCommand(syncPerformersCmd) + syncCmd.AddCommand(syncStudiosCmd) + syncCmd.AddCommand(syncScenesCmd) + syncCmd.AddCommand(syncStatusCmd) +} + +var syncAllCmd = &cobra.Command{ + Use: "all", + Short: "Sync all entities (performers, studios, scenes)", + RunE: func(cmd *cobra.Command, args []string) error { + force, _ := cmd.Flags().GetBool("force") + interval, _ := cmd.Flags().GetDuration("interval") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + return fmt.Errorf("TPDB_API_KEY environment variable is not set") + } + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := sync.NewService(database, scraper) + + opts := sync.SyncOptions{ + Force: force, + MinInterval: interval, + } + + fmt.Printf("Starting sync with %s minimum interval...\n", interval) + if force { + fmt.Println("⚠ Force mode enabled - ignoring rate limits") + } + fmt.Println() + + results, err := service.SyncAll(context.Background(), opts) + if err != nil { + return err + } + + // Display results + for _, r := range results { + fmt.Printf("═══ %s ═══\n", r.EntityType) + if r.ErrorMessage != "" { + fmt.Printf("⚠ %s\n", r.ErrorMessage) + } else { + fmt.Printf("✓ Updated: %d\n", r.Updated) + if r.Failed > 0 { + fmt.Printf("✗ Failed: %d\n", r.Failed) + } + if r.Skipped > 0 { + fmt.Printf("- Skipped: %d\n", r.Skipped) + } + fmt.Printf("⏱ Duration: %s\n", r.Duration.Round(time.Second)) + } + fmt.Println() + } + + return nil + }, +} + +var syncPerformersCmd = &cobra.Command{ + Use: "performers", + Short: "Sync performers only", + RunE: func(cmd *cobra.Command, args []string) error { + force, _ := cmd.Flags().GetBool("force") + interval, _ := cmd.Flags().GetDuration("interval") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + return fmt.Errorf("TPDB_API_KEY environment variable is not set") + } + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := sync.NewService(database, scraper) + + opts := sync.SyncOptions{ + Force: force, + MinInterval: interval, + } + + fmt.Printf("Syncing performers...\n") + result, err := service.SyncPerformers(context.Background(), opts) + if err != nil { + return err + } + + if result.ErrorMessage != "" { + fmt.Printf("⚠ %s\n", result.ErrorMessage) + } else { + fmt.Printf("✓ Updated: %d\n", result.Updated) + if result.Failed > 0 { + fmt.Printf("✗ Failed: %d\n", result.Failed) + } + if result.Skipped > 0 { + fmt.Printf("- Skipped: %d\n", result.Skipped) + } + fmt.Printf("⏱ Duration: %s\n", result.Duration.Round(time.Second)) + } + + return nil + }, +} + +var syncStudiosCmd = &cobra.Command{ + Use: "studios", + Short: "Sync studios only", + RunE: func(cmd *cobra.Command, args []string) error { + force, _ := cmd.Flags().GetBool("force") + interval, _ := cmd.Flags().GetDuration("interval") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + return fmt.Errorf("TPDB_API_KEY environment variable is not set") + } + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := sync.NewService(database, scraper) + + opts := sync.SyncOptions{ + Force: force, + MinInterval: interval, + } + + fmt.Printf("Syncing studios...\n") + result, err := service.SyncStudios(context.Background(), opts) + if err != nil { + return err + } + + if result.ErrorMessage != "" { + fmt.Printf("⚠ %s\n", result.ErrorMessage) + } else { + fmt.Printf("✓ Updated: %d\n", result.Updated) + if result.Failed > 0 { + fmt.Printf("✗ Failed: %d\n", result.Failed) + } + if result.Skipped > 0 { + fmt.Printf("- Skipped: %d\n", result.Skipped) + } + fmt.Printf("⏱ Duration: %s\n", result.Duration.Round(time.Second)) + } + + return nil + }, +} + +var syncScenesCmd = &cobra.Command{ + Use: "scenes", + Short: "Sync scenes only", + RunE: func(cmd *cobra.Command, args []string) error { + force, _ := cmd.Flags().GetBool("force") + interval, _ := cmd.Flags().GetDuration("interval") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + return fmt.Errorf("TPDB_API_KEY environment variable is not set") + } + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := sync.NewService(database, scraper) + + opts := sync.SyncOptions{ + Force: force, + MinInterval: interval, + } + + fmt.Printf("Syncing scenes...\n") + result, err := service.SyncScenes(context.Background(), opts) + if err != nil { + return err + } + + if result.ErrorMessage != "" { + fmt.Printf("⚠ %s\n", result.ErrorMessage) + } else { + fmt.Printf("✓ Updated: %d\n", result.Updated) + if result.Failed > 0 { + fmt.Printf("✗ Failed: %d\n", result.Failed) + } + if result.Skipped > 0 { + fmt.Printf("- Skipped: %d\n", result.Skipped) + } + fmt.Printf("⏱ Duration: %s\n", result.Duration.Round(time.Second)) + } + + return nil + }, +} + +var syncStatusCmd = &cobra.Command{ + Use: "status", + Short: "View sync status and last sync times", + RunE: func(cmd *cobra.Command, args []string) error { + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + scraper := tpdb.NewScraper("https://api.theporndb.net", "") + service := sync.NewService(database, scraper) + + statuses, err := service.GetSyncStatus() + if err != nil { + return err + } + + if len(statuses) == 0 { + fmt.Println("No sync history found.") + fmt.Println("\nRun 'goondex sync all' to perform your first sync.") + return nil + } + + fmt.Println("Sync Status:") + fmt.Println() + + for _, s := range statuses { + fmt.Printf("═══ %s ═══\n", s.EntityType) + fmt.Printf("Status: %s\n", s.Status) + fmt.Printf("Last Sync: %s\n", s.LastSyncAt.Format("2006-01-02 15:04:05")) + fmt.Printf("Updated: %d\n", s.RecordsUpdated) + if s.RecordsFailed > 0 { + fmt.Printf("Failed: %d\n", s.RecordsFailed) + } + if s.ErrorMessage != "" { + fmt.Printf("Error: %s\n", s.ErrorMessage) + } + + // Calculate time since last sync + timeSince := time.Since(s.LastSyncAt) + fmt.Printf("Time since last sync: %s\n", formatDuration(timeSince)) + + fmt.Println() + } + + return nil + }, +} + +func formatDuration(d time.Duration) string { + if d < time.Minute { + return fmt.Sprintf("%d seconds", int(d.Seconds())) + } + if d < time.Hour { + return fmt.Sprintf("%d minutes", int(d.Minutes())) + } + if d < 24*time.Hour { + return fmt.Sprintf("%d hours", int(d.Hours())) + } + return fmt.Sprintf("%d days", int(d.Hours()/24)) } func main() { @@ -58,11 +391,39 @@ func getDB() (*db.DB, error) { return database, nil } +var webCmd = &cobra.Command{ + Use: "web", + Short: "Start the web UI server", + Long: `Start a web server that provides a visual interface for browsing your Goondex database.`, + RunE: func(cmd *cobra.Command, args []string) error { + addr, _ := cmd.Flags().GetString("addr") + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + server, err := web.NewServer(database, addr) + if err != nil { + return fmt.Errorf("failed to create web server: %w", err) + } + + return server.Start() + }, +} + var versionCmd = &cobra.Command{ Use: "version", Short: "Print version information", Run: func(cmd *cobra.Command, args []string) { - fmt.Println("Goondex v0.1.0-dev2") + fmt.Println("Goondex v0.1.0-dev4") + fmt.Println("Features:") + fmt.Println(" • TPDB integration with auto-import") + fmt.Println(" • Adult Empire scraper (scenes & performers)") + fmt.Println(" • Multi-source data merging") + fmt.Println(" • Grid-based web UI with GX components") + fmt.Println(" • Performer/studio/scene management") }, } @@ -72,6 +433,7 @@ var performerSearchCmd = &cobra.Command{ Args: cobra.MinimumNArgs(1), RunE: func(cmd *cobra.Command, args []string) error { query := args[0] + showBio, _ := cmd.Flags().GetBool("show-bio") database, err := getDB() if err != nil { @@ -157,6 +519,15 @@ var performerSearchCmd = &cobra.Command{ } fmt.Printf("\n") + // Stats + sceneCount, err := store.GetSceneCount(p.ID) + if err != nil { + fmt.Printf("⚠ Failed to get scene count: %v\n", err) + } else { + fmt.Printf("Scenes: %d\n", sceneCount) + } + fmt.Printf("\n") + // Personal info if p.Gender != "" { fmt.Printf("Gender: %s\n", p.Gender) @@ -218,7 +589,7 @@ var performerSearchCmd = &cobra.Command{ fmt.Printf("\n") // Bio - if p.Bio != "" { + if showBio && p.Bio != "" { fmt.Printf("Bio:\n%s\n\n", p.Bio) } @@ -237,6 +608,233 @@ var performerSearchCmd = &cobra.Command{ }, } +var performerUpdateCmd = &cobra.Command{ + Use: "performer-update [id]", + Short: "Update/refresh performer data from TPDB and Adult Empire", + Long: `Update an existing performer with the latest data from TPDB and optionally merge with Adult Empire data.`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + // Parse ID + var performerID int64 + if _, err := fmt.Sscanf(args[0], "%d", &performerID); err != nil { + return fmt.Errorf("invalid performer ID: %s", args[0]) + } + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + performerStore := db.NewPerformerStore(database) + + // Get existing performer + performer, err := performerStore.GetByID(performerID) + if err != nil { + return fmt.Errorf("performer not found: %w", err) + } + + fmt.Printf("Updating performer: %s (ID: %d)\n", performer.Name, performer.ID) + + // Update from TPDB if available + if performer.Source == "tpdb" && performer.SourceID != "" { + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey != "" { + fmt.Println("📥 Fetching latest data from TPDB...") + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + + updatedPerformer, err := scraper.GetPerformerByID(context.Background(), performer.SourceID) + if err != nil { + fmt.Printf("⚠ Failed to fetch from TPDB: %v\n", err) + } else { + // Preserve local ID + updatedPerformer.ID = performer.ID + + if err := performerStore.Update(updatedPerformer); err != nil { + fmt.Printf("⚠ Failed to update: %v\n", err) + } else { + fmt.Println("✓ Updated from TPDB") + performer = updatedPerformer + } + } + } else { + fmt.Println("⚠ TPDB_API_KEY not set, skipping TPDB update") + } + } + + // Optionally search Adult Empire for additional data + fmt.Printf("\n🔍 Searching Adult Empire for '%s'...\n", performer.Name) + + adultempScraper, err := adultemp.NewScraper() + if err != nil { + fmt.Printf("⚠ Failed to create Adult Empire scraper: %v\n", err) + return nil + } + + results, err := adultempScraper.SearchPerformersByName(context.Background(), performer.Name) + if err != nil { + fmt.Printf("⚠ Adult Empire search failed: %v\n", err) + return nil + } + + if len(results) == 0 { + fmt.Println("No matches found on Adult Empire") + return nil + } + + fmt.Printf("\nFound %d potential match(es) on Adult Empire:\n", len(results)) + for i, result := range results { + fmt.Printf("%d. %s - %s\n", i+1, result.Title, result.URL) + } + + fmt.Println("\n✓ Performer data refreshed successfully") + fmt.Println("Note: Use 'adultemp scrape-performer ' to import specific Adult Empire data") + + return nil + }, +} + +var performerGetCmd = &cobra.Command{ + Use: "performer-get [id]", + Short: "Get detailed information about a performer by ID", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + // Parse ID + var performerID int64 + if _, err := fmt.Sscanf(args[0], "%d", &performerID); err != nil { + return fmt.Errorf("invalid performer ID: %s", args[0]) + } + + showBio, _ := cmd.Flags().GetBool("show-bio") + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + store := db.NewPerformerStore(database) + + // Get performer + performer, err := store.GetByID(performerID) + if err != nil { + return fmt.Errorf("performer not found: %w", err) + } + + // Get scene count + sceneCount, err := store.GetSceneCount(performerID) + if err != nil { + fmt.Printf("⚠ Failed to get scene count: %v\n", err) + sceneCount = 0 + } + + // Display performer information (reusing the format from performer-search) + fmt.Printf("═══════════════════════════════════════════════════\n") + fmt.Printf("Name: %s\n", performer.Name) + if performer.Aliases != "" { + fmt.Printf("Aliases: %s\n", performer.Aliases) + } + fmt.Printf("\n") + + // IDs + fmt.Printf("Local ID: %d\n", performer.ID) + if performer.Source != "" { + fmt.Printf("Source: %s\n", performer.Source) + fmt.Printf("UUID: %s\n", performer.SourceID) + if performer.SourceNumericID > 0 { + fmt.Printf("TPDB ID: %d\n", performer.SourceNumericID) + } + } + fmt.Printf("\n") + + // Stats + fmt.Printf("Scenes: %d\n", sceneCount) + fmt.Printf("\n") + + // Personal info + if performer.Gender != "" { + fmt.Printf("Gender: %s\n", performer.Gender) + } + if performer.Birthday != "" { + fmt.Printf("Birthday: %s\n", performer.Birthday) + } + if performer.Astrology != "" { + fmt.Printf("Astrology: %s\n", performer.Astrology) + } + if performer.DateOfDeath != "" { + fmt.Printf("Date of Death: %s\n", performer.DateOfDeath) + } + if performer.Career != "" { + fmt.Printf("Career: %s\n", performer.Career) + } + if performer.Birthplace != "" { + fmt.Printf("Birthplace: %s\n", performer.Birthplace) + } + if performer.Ethnicity != "" { + fmt.Printf("Ethnicity: %s\n", performer.Ethnicity) + } + if performer.Nationality != "" { + fmt.Printf("Nationality: %s\n", performer.Nationality) + } + fmt.Printf("\n") + + // Physical attributes + if performer.CupSize != "" { + fmt.Printf("Cup Size: %s\n", performer.CupSize) + } + if performer.HairColor != "" { + fmt.Printf("Hair Colour: %s\n", performer.HairColor) + } + if performer.EyeColor != "" { + fmt.Printf("Eye Colour: %s\n", performer.EyeColor) + } + if performer.Height > 0 { + feet := float64(performer.Height) / 30.48 + inches := (float64(performer.Height) / 2.54) - (feet * 12) + fmt.Printf("Height: %dcm (%.0f'%.0f\")\n", performer.Height, feet, inches) + } + if performer.Weight > 0 { + lbs := float64(performer.Weight) * 2.20462 + fmt.Printf("Weight: %dkg (%.0flb)\n", performer.Weight, lbs) + } + if performer.Measurements != "" { + fmt.Printf("Measurements: %s\n", performer.Measurements) + } + if performer.TattooDescription != "" { + fmt.Printf("Tattoos: %s\n", performer.TattooDescription) + } + if performer.PiercingDescription != "" { + fmt.Printf("Piercings: %s\n", performer.PiercingDescription) + } + if performer.BoobJob != "" { + fmt.Printf("Fake Boobs: %s\n", performer.BoobJob) + } + fmt.Printf("\n") + + // Bio + if showBio && performer.Bio != "" { + fmt.Printf("Bio:\n%s\n\n", performer.Bio) + } + + // Media + if performer.ImageURL != "" { + fmt.Printf("Image: %s\n", performer.ImageURL) + } + if performer.PosterURL != "" { + fmt.Printf("Poster: %s\n", performer.PosterURL) + } + + // Timestamps + fmt.Printf("\n") + fmt.Printf("Created: %s\n", performer.CreatedAt.Format("2006-01-02 15:04:05")) + fmt.Printf("Updated: %s\n", performer.UpdatedAt.Format("2006-01-02 15:04:05")) + + fmt.Printf("═══════════════════════════════════════════════════\n\n") + + return nil + }, +} + var studioSearchCmd = &cobra.Command{ Use: "studio-search [query]", Short: "Search for studios (auto-fetches from TPDB if not in local database)", @@ -317,6 +915,90 @@ var studioSearchCmd = &cobra.Command{ }, } +var studioGetCmd = &cobra.Command{ + Use: "studio-get [id]", + Short: "Get detailed information about a studio by ID", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + // Parse ID + var studioID int64 + if _, err := fmt.Sscanf(args[0], "%d", &studioID); err != nil { + return fmt.Errorf("invalid studio ID: %s", args[0]) + } + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + store := db.NewStudioStore(database) + + // Get studio + studio, err := store.GetByID(studioID) + if err != nil { + return fmt.Errorf("studio not found: %w", err) + } + + // Get scene count + sceneCount, err := store.GetSceneCount(studioID) + if err != nil { + fmt.Printf("⚠ Failed to get scene count: %v\n", err) + sceneCount = 0 + } + + // Display studio information + fmt.Printf("═══════════════════════════════════════════════════\n") + fmt.Printf("Studio Details\n") + fmt.Printf("═══════════════════════════════════════════════════\n\n") + + fmt.Printf("ID: %d\n", studio.ID) + fmt.Printf("Name: %s\n", studio.Name) + + if studio.Description != "" { + fmt.Printf("\nDescription:\n%s\n", studio.Description) + } + + fmt.Printf("\n") + fmt.Printf("Scenes: %d\n", sceneCount) + + // Parent studio + if studio.ParentID != nil && *studio.ParentID > 0 { + parentStudio, err := store.GetByID(*studio.ParentID) + if err == nil { + fmt.Printf("Parent Studio: %s (ID: %d)\n", parentStudio.Name, parentStudio.ID) + } else { + fmt.Printf("Parent Studio ID: %d\n", *studio.ParentID) + } + } + + // Source information + if studio.Source != "" { + fmt.Printf("\n") + fmt.Printf("Source: %s\n", studio.Source) + fmt.Printf("Source ID: %s\n", studio.SourceID) + } + + // Media + if studio.ImageURL != "" { + fmt.Printf("\n") + fmt.Printf("Image: %s\n", studio.ImageURL) + } + if studio.ImagePath != "" { + fmt.Printf("Image Path: %s\n", studio.ImagePath) + } + + // Timestamps + fmt.Printf("\n") + fmt.Printf("Created: %s\n", studio.CreatedAt.Format("2006-01-02 15:04:05")) + fmt.Printf("Updated: %s\n", studio.UpdatedAt.Format("2006-01-02 15:04:05")) + + fmt.Printf("\n═══════════════════════════════════════════════════\n") + + return nil + }, +} + var sceneSearchCmd = &cobra.Command{ Use: "scene-search [query]", Short: "Search for scenes (auto-fetches from TPDB if not in local database)", @@ -409,6 +1091,519 @@ var sceneSearchCmd = &cobra.Command{ }, } +var sceneGetCmd = &cobra.Command{ + Use: "scene-get [id]", + Short: "Get detailed information about a scene by ID", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + // Parse ID + var sceneID int64 + if _, err := fmt.Sscanf(args[0], "%d", &sceneID); err != nil { + return fmt.Errorf("invalid scene ID: %s", args[0]) + } + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + sceneStore := db.NewSceneStore(database) + studioStore := db.NewStudioStore(database) + + // Get scene + scene, err := sceneStore.GetByID(sceneID) + if err != nil { + return fmt.Errorf("scene not found: %w", err) + } + + // Get performers + performers, err := sceneStore.GetPerformers(sceneID) + if err != nil { + fmt.Printf("⚠ Failed to get performers: %v\n", err) + } + + // Get tags + tags, err := sceneStore.GetTags(sceneID) + if err != nil { + fmt.Printf("⚠ Failed to get tags: %v\n", err) + } + + // Get studio if present + var studio *model.Studio + if scene.StudioID != nil && *scene.StudioID > 0 { + studio, err = studioStore.GetByID(*scene.StudioID) + if err != nil { + fmt.Printf("⚠ Failed to get studio: %v\n", err) + } + } + + // Display scene information + fmt.Printf("═══════════════════════════════════════════════════\n") + fmt.Printf("Scene Details\n") + fmt.Printf("═══════════════════════════════════════════════════\n\n") + + fmt.Printf("ID: %d\n", scene.ID) + fmt.Printf("Title: %s\n", scene.Title) + + if scene.Code != "" { + fmt.Printf("Code: %s\n", scene.Code) + } + + if scene.Date != "" { + fmt.Printf("Date: %s\n", scene.Date) + } + + if scene.Director != "" { + fmt.Printf("Director: %s\n", scene.Director) + } + + fmt.Printf("\n") + + // Studio + if studio != nil { + fmt.Printf("Studio: %s (ID: %d)\n", studio.Name, studio.ID) + } + + // Performers + if len(performers) > 0 { + fmt.Printf("\nPerformers (%d):\n", len(performers)) + for _, p := range performers { + fmt.Printf(" - %s (ID: %d)\n", p.Name, p.ID) + } + } + + // Tags + if len(tags) > 0 { + fmt.Printf("\nTags (%d):\n", len(tags)) + for _, t := range tags { + fmt.Printf(" - %s\n", t.Name) + } + } + + // Description + if scene.Description != "" { + fmt.Printf("\nDescription:\n%s\n", scene.Description) + } + + // Source information + if scene.Source != "" { + fmt.Printf("\n") + fmt.Printf("Source: %s\n", scene.Source) + fmt.Printf("Source ID: %s\n", scene.SourceID) + } + + // Media + if scene.URL != "" { + fmt.Printf("\nURL: %s\n", scene.URL) + } + if scene.ImageURL != "" { + fmt.Printf("Image: %s\n", scene.ImageURL) + } + if scene.ImagePath != "" { + fmt.Printf("Image Path: %s\n", scene.ImagePath) + } + + // Timestamps + fmt.Printf("\n") + fmt.Printf("Created: %s\n", scene.CreatedAt.Format("2006-01-02 15:04:05")) + fmt.Printf("Updated: %s\n", scene.UpdatedAt.Format("2006-01-02 15:04:05")) + + fmt.Printf("\n═══════════════════════════════════════════════════\n") + + return nil + }, +} + +var importAllPerformersCmd = &cobra.Command{ + Use: "all-performers", + Short: "Import ALL performers from TPDB (paginated)", + Long: `Import all 10,000+ performers from ThePornDB by paginating through all pages. This may take a while.`, + RunE: func(cmd *cobra.Command, args []string) error { + startPage, _ := cmd.Flags().GetInt("start-page") + maxPages, _ := cmd.Flags().GetInt("max-pages") + + // Get API key from environment + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + return fmt.Errorf("TPDB_API_KEY environment variable is not set") + } + + // Create TPDB scraper + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + + // Open database + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + performerStore := db.NewPerformerStore(database) + + fmt.Println("╔═══════════════════════════════════════════════════════════════╗") + fmt.Println("║ TPDB BULK PERFORMER IMPORT ║") + fmt.Println("╚═══════════════════════════════════════════════════════════════╝") + fmt.Println() + + totalImported := 0 + totalFailed := 0 + totalSkipped := 0 + currentPage := startPage + + for { + fmt.Printf("📥 Fetching page %d...\n", currentPage) + + performers, meta, err := scraper.ListPerformers(context.Background(), currentPage) + if err != nil { + fmt.Printf("⚠ Failed to fetch page %d: %v\n", currentPage, err) + totalFailed++ + currentPage++ + continue + } + + if meta == nil { + fmt.Println("⚠ No metadata returned") + break + } + + fmt.Printf(" Page %d/%d | Total: %d performers | Per page: %d\n", + meta.CurrentPage, meta.LastPage, meta.Total, meta.PerPage) + + // Import performers from this page + pageImported := 0 + pageFailed := 0 + pageSkipped := 0 + + for _, p := range performers { + // Check if already exists + existing, _ := performerStore.GetBySourceID("tpdb", p.SourceID) + if existing != nil { + pageSkipped++ + totalSkipped++ + continue + } + + if err := performerStore.Create(&p); err != nil { + fmt.Printf(" ⚠ Failed to import %s: %v\n", p.Name, err) + pageFailed++ + totalFailed++ + continue + } + pageImported++ + totalImported++ + } + + fmt.Printf(" ✓ Imported: %d | ⚠ Failed: %d | ⊘ Skipped: %d\n\n", + pageImported, pageFailed, pageSkipped) + + // Check if we should stop + if maxPages > 0 && (currentPage-startPage+1) >= maxPages { + fmt.Printf("⏹ Reached maximum pages limit (%d)\n", maxPages) + break + } + + if currentPage >= meta.LastPage { + fmt.Println("✓ Reached last page") + break + } + + currentPage++ + + // Small delay to be nice to the API + time.Sleep(500 * time.Millisecond) + } + + fmt.Println() + fmt.Println("╔═══════════════════════════════════════════════════════════════╗") + fmt.Println("║ IMPORT COMPLETE ║") + fmt.Println("╚═══════════════════════════════════════════════════════════════╝") + fmt.Printf("✓ Total Imported: %d\n", totalImported) + fmt.Printf("⚠ Total Failed: %d\n", totalFailed) + fmt.Printf("⊘ Total Skipped (already exist): %d\n", totalSkipped) + fmt.Printf("\n💡 Tip: Run 'sqlite3 goondex.db \"SELECT COUNT(*) FROM performers\"' to check total count\n") + + return nil + }, +} + +var importAllStudiosCmd = &cobra.Command{ + Use: "all-studios", + Short: "Import ALL studios from TPDB (paginated)", + Long: `Import all 60,000+ studios from ThePornDB by paginating through all pages. This may take a while.`, + RunE: func(cmd *cobra.Command, args []string) error { + startPage, _ := cmd.Flags().GetInt("start-page") + maxPages, _ := cmd.Flags().GetInt("max-pages") + + // Get API key from environment + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + return fmt.Errorf("TPDB_API_KEY environment variable is not set") + } + + // Create TPDB scraper + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + + // Get database + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + studioStore := db.NewStudioStore(database) + + fmt.Println("╔═══════════════════════════════════════════════════════════════╗") + fmt.Println("║ TPDB BULK STUDIO IMPORT - ALL PAGES ║") + fmt.Println("╚═══════════════════════════════════════════════════════════════╝") + fmt.Printf("Starting from page: %d\n", startPage) + if maxPages > 0 { + fmt.Printf("Max pages: %d\n", maxPages) + } else { + fmt.Println("Max pages: ALL (no limit)") + } + fmt.Println() + + totalImported := 0 + totalFailed := 0 + totalSkipped := 0 + currentPage := startPage + + for { + fmt.Printf("📥 Fetching page %d...\n", currentPage) + + studios, meta, err := scraper.ListStudios(context.Background(), currentPage) + if err != nil { + fmt.Printf("⚠ Failed to fetch page %d: %v\n", currentPage, err) + totalFailed++ + currentPage++ + continue + } + + if meta == nil { + fmt.Println("⚠ No metadata returned") + break + } + + fmt.Printf(" Page %d/%d | Total: %d studios | Per page: %d\n", + meta.CurrentPage, meta.LastPage, meta.Total, meta.PerPage) + + // Import studios from this page + pageImported := 0 + pageFailed := 0 + pageSkipped := 0 + + for _, st := range studios { + // Check if already exists + existing, _ := studioStore.GetBySourceID("tpdb", st.SourceID) + if existing != nil { + pageSkipped++ + totalSkipped++ + continue + } + + if err := studioStore.Create(&st); err != nil { + fmt.Printf(" ⚠ Failed to import %s: %v\n", st.Name, err) + pageFailed++ + totalFailed++ + continue + } + pageImported++ + totalImported++ + } + + fmt.Printf(" ✓ Imported: %d | ⚠ Failed: %d | ⊘ Skipped: %d\n\n", + pageImported, pageFailed, pageSkipped) + + // Check if we should stop + if maxPages > 0 && (currentPage-startPage+1) >= maxPages { + fmt.Printf("⏹ Reached maximum pages limit (%d)\n", maxPages) + break + } + + if currentPage >= meta.LastPage { + fmt.Println("✓ Reached last page") + break + } + + currentPage++ + + // Small delay to be nice to the API + time.Sleep(500 * time.Millisecond) + } + + fmt.Println() + fmt.Println("╔═══════════════════════════════════════════════════════════════╗") + fmt.Println("║ IMPORT COMPLETE ║") + fmt.Println("╚═══════════════════════════════════════════════════════════════╝") + fmt.Printf("✓ Total Imported: %d\n", totalImported) + fmt.Printf("⚠ Total Failed: %d\n", totalFailed) + fmt.Printf("⊘ Total Skipped (already exist): %d\n", totalSkipped) + fmt.Printf("\n💡 Tip: Run 'sqlite3 goondex.db \"SELECT COUNT(*) FROM studios\"' to check total count\n") + + return nil + }, +} + +var importAllScenesCmd = &cobra.Command{ + Use: "all-scenes", + Short: "Import ALL scenes from TPDB (paginated)", + Long: `Import all scenes from ThePornDB by paginating through all pages. This may take a while.`, + RunE: func(cmd *cobra.Command, args []string) error { + startPage, _ := cmd.Flags().GetInt("start-page") + maxPages, _ := cmd.Flags().GetInt("max-pages") + + // Get API key from environment + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + return fmt.Errorf("TPDB_API_KEY environment variable is not set") + } + + // Create TPDB scraper + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + + // Get database + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + sceneStore := db.NewSceneStore(database) + performerStore := db.NewPerformerStore(database) + studioStore := db.NewStudioStore(database) + tagStore := db.NewTagStore(database) + + fmt.Println("╔═══════════════════════════════════════════════════════════════╗") + fmt.Println("║ TPDB BULK SCENE IMPORT - ALL PAGES ║") + fmt.Println("╚═══════════════════════════════════════════════════════════════╝") + fmt.Printf("Starting from page: %d\n", startPage) + if maxPages > 0 { + fmt.Printf("Max pages: %d\n", maxPages) + } else { + fmt.Println("Max pages: ALL (no limit)") + } + fmt.Println() + + totalImported := 0 + totalFailed := 0 + totalSkipped := 0 + currentPage := startPage + + for { + fmt.Printf("📥 Fetching page %d...\n", currentPage) + + scenes, meta, err := scraper.ListScenes(context.Background(), currentPage) + if err != nil { + fmt.Printf("⚠ Failed to fetch page %d: %v\n", currentPage, err) + totalFailed++ + currentPage++ + continue + } + + if meta == nil { + fmt.Println("⚠ No metadata returned") + break + } + + fmt.Printf(" Page %d/%d | Total: %d scenes | Per page: %d\n", + meta.CurrentPage, meta.LastPage, meta.Total, meta.PerPage) + + // Import scenes from this page + pageImported := 0 + pageFailed := 0 + pageSkipped := 0 + + for _, sc := range scenes { + // Check if already exists + existing, _ := sceneStore.GetBySourceID("tpdb", sc.SourceID) + if existing != nil { + pageSkipped++ + totalSkipped++ + continue + } + + // Import studio if not exists + if sc.Studio != nil { + existingStudio, _ := studioStore.GetBySourceID(sc.Studio.Source, sc.Studio.SourceID) + if existingStudio != nil { + sc.StudioID = &existingStudio.ID + } else { + if err := studioStore.Create(sc.Studio); err == nil { + sc.StudioID = &sc.Studio.ID + } + } + } + + // Create scene + if err := sceneStore.Create(&sc); err != nil { + fmt.Printf(" ⚠ Failed to import %s: %v\n", sc.Title, err) + pageFailed++ + totalFailed++ + continue + } + + // Import performers and link them + for _, p := range sc.Performers { + existingPerformer, _ := performerStore.GetBySourceID(p.Source, p.SourceID) + if existingPerformer != nil { + sceneStore.AddPerformer(sc.ID, existingPerformer.ID) + } else { + if err := performerStore.Create(&p); err == nil { + sceneStore.AddPerformer(sc.ID, p.ID) + } + } + } + + // Import tags and link them + for _, t := range sc.Tags { + existingTag, _ := tagStore.GetByName(t.Name) + if existingTag != nil { + sceneStore.AddTag(sc.ID, existingTag.ID) + } else { + if err := tagStore.Create(&t); err == nil { + sceneStore.AddTag(sc.ID, t.ID) + } + } + } + + pageImported++ + totalImported++ + } + + fmt.Printf(" ✓ Imported: %d | ⚠ Failed: %d | ⊘ Skipped: %d\n\n", + pageImported, pageFailed, pageSkipped) + + // Check if we should stop + if maxPages > 0 && (currentPage-startPage+1) >= maxPages { + fmt.Printf("⏹ Reached maximum pages limit (%d)\n", maxPages) + break + } + + if currentPage >= meta.LastPage { + fmt.Println("✓ Reached last page") + break + } + + currentPage++ + + // Small delay to be nice to the API + time.Sleep(500 * time.Millisecond) + } + + fmt.Println() + fmt.Println("╔═══════════════════════════════════════════════════════════════╗") + fmt.Println("║ IMPORT COMPLETE ║") + fmt.Println("╚═══════════════════════════════════════════════════════════════╝") + fmt.Printf("✓ Total Imported: %d\n", totalImported) + fmt.Printf("⚠ Total Failed: %d\n", totalFailed) + fmt.Printf("⊘ Total Skipped (already exist): %d\n", totalSkipped) + fmt.Printf("\n💡 Tip: Run 'sqlite3 goondex.db \"SELECT COUNT(*) FROM scenes\"' to check total count\n") + + return nil + }, +} + var importPerformerCmd = &cobra.Command{ Use: "performer [query]", Short: "Search TPDB for performers and import them to local database", @@ -521,6 +1716,448 @@ var importStudioCmd = &cobra.Command{ }, } +// Adult Empire command with subcommands +var adultempCmd = &cobra.Command{ + Use: "adultemp", + Short: "Scrape data from Adult Empire", + Long: `Search and scrape performers, scenes, and metadata from Adult Empire (adultdvdempire.com).`, +} + +func init() { + adultempCmd.AddCommand(adultempSearchSceneCmd) + adultempCmd.AddCommand(adultempSearchPerformerCmd) + adultempCmd.AddCommand(adultempSearchMovieCmd) + adultempCmd.AddCommand(adultempScrapeSceneCmd) + adultempCmd.AddCommand(adultempScrapePerformerCmd) + adultempCmd.AddCommand(adultempScrapeMovieCmd) + adultempCmd.AddCommand(adultempMergePerformerCmd) + adultempCmd.PersistentFlags().String("etoken", "", "Adult Empire authentication token (etoken cookie)") +} + +var adultempSearchSceneCmd = &cobra.Command{ + Use: "search-scene [query]", + Short: "Search for scenes on Adult Empire", + Args: cobra.MinimumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + query := args[0] + + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + // Set auth token if provided + if flag := cmd.Flag("etoken"); flag != nil { + if etoken := flag.Value.String(); etoken != "" { + if err := scraper.SetAuthToken(etoken); err != nil { + return fmt.Errorf("failed to set auth token: %w", err) + } + } + } + + fmt.Printf("Searching Adult Empire for scenes matching '%s'...\n\n", query) + + results, err := scraper.SearchScenesByName(context.Background(), query) + if err != nil { + return fmt.Errorf("search failed: %w", err) + } + + if len(results) == 0 { + fmt.Println("No scenes found on Adult Empire") + return nil + } + + fmt.Printf("Found %d scene(s):\n\n", len(results)) + for i, result := range results { + fmt.Printf("%d. %s\n", i+1, result.Title) + fmt.Printf(" URL: %s\n", result.URL) + if result.Image != "" { + fmt.Printf(" Image: %s\n", result.Image) + } + fmt.Println() + } + + return nil + }, +} + +var adultempSearchPerformerCmd = &cobra.Command{ + Use: "search-performer [name]", + Short: "Search for performers on Adult Empire", + Args: cobra.MinimumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + name := args[0] + + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + // Set auth token if provided + if flag := cmd.Flag("etoken"); flag != nil { + if etoken := flag.Value.String(); etoken != "" { + if err := scraper.SetAuthToken(etoken); err != nil { + return fmt.Errorf("failed to set auth token: %w", err) + } + } + } + + fmt.Printf("Searching Adult Empire for performers matching '%s'...\n\n", name) + + results, err := scraper.SearchPerformersByName(context.Background(), name) + if err != nil { + return fmt.Errorf("search failed: %w", err) + } + + if len(results) == 0 { + fmt.Println("No performers found on Adult Empire") + return nil + } + + fmt.Printf("Found %d performer(s):\n\n", len(results)) + for i, result := range results { + fmt.Printf("%d. %s\n", i+1, result.Title) + fmt.Printf(" URL: %s\n", result.URL) + if result.Image != "" { + fmt.Printf(" Image: %s\n", result.Image) + } + fmt.Println() + } + + return nil + }, +} + +var adultempScrapeSceneCmd = &cobra.Command{ + Use: "scrape-scene [url]", + Short: "Scrape a scene from Adult Empire by URL and import to database", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + url := args[0] + + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + // Set auth token if provided + if flag := cmd.Flag("etoken"); flag != nil { + if etoken := flag.Value.String(); etoken != "" { + if err := scraper.SetAuthToken(etoken); err != nil { + return fmt.Errorf("failed to set auth token: %w", err) + } + } + } + + fmt.Printf("Scraping scene from %s...\n", url) + + sceneData, err := scraper.ScrapeSceneByURL(context.Background(), url) + if err != nil { + return fmt.Errorf("scrape failed: %w", err) + } + + // Display scraped data + fmt.Printf("\n✓ Scraped scene data:\n") + fmt.Printf("═══════════════════════════════════════════════════\n") + fmt.Printf("Title: %s\n", sceneData.Title) + if sceneData.Date != "" { + fmt.Printf("Date: %s\n", sceneData.Date) + } + if sceneData.Studio != "" { + fmt.Printf("Studio: %s\n", sceneData.Studio) + } + if sceneData.Director != "" { + fmt.Printf("Director: %s\n", sceneData.Director) + } + if sceneData.Code != "" { + fmt.Printf("Code: %s\n", sceneData.Code) + } + if len(sceneData.Performers) > 0 { + fmt.Printf("Performers: %s\n", sceneData.Performers) + } + if len(sceneData.Tags) > 0 { + fmt.Printf("Tags: %s\n", sceneData.Tags) + } + if sceneData.Description != "" { + fmt.Printf("\nDescription:\n%s\n", sceneData.Description) + } + fmt.Printf("═══════════════════════════════════════════════════\n\n") + + // Convert to model and save to database + scene := scraper.ConvertSceneToModel(sceneData) + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + sceneStore := db.NewSceneStore(database) + + // Import studio if present + if sceneData.Studio != "" { + studioStore := db.NewStudioStore(database) + studios, _ := studioStore.Search(sceneData.Studio) + if len(studios) > 0 { + scene.StudioID = &studios[0].ID + } else { + // Create new studio + newStudio := &model.Studio{ + Name: sceneData.Studio, + Source: "adultemp", + } + if err := studioStore.Create(newStudio); err == nil { + scene.StudioID = &newStudio.ID + } + } + } + + if err := sceneStore.Create(scene); err != nil { + return fmt.Errorf("failed to save scene: %w", err) + } + + fmt.Printf("✓ Scene imported to database with ID: %d\n", scene.ID) + return nil + }, +} + +var adultempScrapePerformerCmd = &cobra.Command{ + Use: "scrape-performer [url]", + Short: "Scrape a performer from Adult Empire by URL and import to database", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + url := args[0] + + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + // Set auth token if provided + if flag := cmd.Flag("etoken"); flag != nil { + if etoken := flag.Value.String(); etoken != "" { + if err := scraper.SetAuthToken(etoken); err != nil { + return fmt.Errorf("failed to set auth token: %w", err) + } + } + } + + fmt.Printf("Scraping performer from %s...\n", url) + + performerData, err := scraper.ScrapePerformerByURL(context.Background(), url) + if err != nil { + return fmt.Errorf("scrape failed: %w", err) + } + + // Display scraped data + fmt.Printf("\n✓ Scraped performer data:\n") + fmt.Printf("═══════════════════════════════════════════════════\n") + fmt.Printf("Name: %s\n", performerData.Name) + if len(performerData.Aliases) > 0 { + fmt.Printf("Aliases: %v\n", performerData.Aliases) + } + if performerData.Birthdate != "" { + fmt.Printf("Birthday: %s\n", performerData.Birthdate) + } + if performerData.Ethnicity != "" { + fmt.Printf("Ethnicity: %s\n", performerData.Ethnicity) + } + if performerData.Country != "" { + fmt.Printf("Country: %s\n", performerData.Country) + } + if performerData.Height != "" { + fmt.Printf("Height: %s\n", performerData.Height) + } + if performerData.HairColor != "" { + fmt.Printf("Hair Color: %s\n", performerData.HairColor) + } + if performerData.EyeColor != "" { + fmt.Printf("Eye Color: %s\n", performerData.EyeColor) + } + if performerData.Measurements != "" { + fmt.Printf("Measurements: %s\n", performerData.Measurements) + } + if performerData.Biography != "" { + fmt.Printf("\nBio:\n%s\n", performerData.Biography) + } + fmt.Printf("═══════════════════════════════════════════════════\n\n") + + // Convert to model and save to database + performer := scraper.ConvertPerformerToModel(performerData) + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + performerStore := db.NewPerformerStore(database) + if err := performerStore.Create(performer); err != nil { + return fmt.Errorf("failed to save performer: %w", err) + } + + fmt.Printf("✓ Performer imported to database with ID: %d\n", performer.ID) + return nil + }, +} + +var adultempSearchMovieCmd = &cobra.Command{ + Use: "search-movie [query]", + Short: "Search for movies on Adult Empire", + Args: cobra.MinimumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + query := args[0] + + fmt.Println("🔍 Searching Adult Empire for movies...") + fmt.Printf("Query: \"%s\"\n\n", query) + + fmt.Println("⚠️ Movie search is not yet implemented.") + fmt.Println() + fmt.Println("For now, please:") + fmt.Println("1. Go to https://www.adultdvdempire.com") + fmt.Printf("2. Search for: \"%s\"\n", query) + fmt.Println("3. Copy the URL of the movie you want") + fmt.Println("4. Import it with: ./goondex adultemp scrape-movie [url]") + + return nil + }, +} + +var adultempScrapeMovieCmd = &cobra.Command{ + Use: "scrape-movie [url]", + Short: "Scrape and import a movie from Adult Empire by URL", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + movieURL := args[0] + + fmt.Println("📥 Scraping movie from Adult Empire...") + fmt.Printf("URL: %s\n\n", movieURL) + + fmt.Println("⚠️ Movie scraping is not yet fully implemented.") + fmt.Println() + fmt.Println("The movie database schema is ready, but the Adult Empire") + fmt.Println("movie scraper needs to be completed.") + fmt.Println() + fmt.Println("📌 What's ready:") + fmt.Println(" • Movie database table and schema") + fmt.Println(" • Movie web UI for browsing") + fmt.Println(" • Movie-Scene relationships") + fmt.Println() + fmt.Println("📌 What's needed:") + fmt.Println(" • Adult Empire HTML parser for movies") + fmt.Println(" • Movie data extraction (title, cast, scenes, etc.)") + fmt.Println() + fmt.Println("💡 This feature is planned for the next release.") + + return nil + }, +} + +var adultempMergePerformerCmd = &cobra.Command{ + Use: "merge-performer [id] [adultemp-url]", + Short: "Merge Adult Empire data into an existing performer", + Long: `Fetch performer data from Adult Empire and intelligently merge it with existing TPDB data.`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + // Parse ID + var performerID int64 + if _, err := fmt.Sscanf(args[0], "%d", &performerID); err != nil { + return fmt.Errorf("invalid performer ID: %s", args[0]) + } + + adultempURL := args[1] + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + performerStore := db.NewPerformerStore(database) + + // Get existing performer + performer, err := performerStore.GetByID(performerID) + if err != nil { + return fmt.Errorf("performer not found: %w", err) + } + + fmt.Printf("Merging Adult Empire data into: %s (ID: %d)\n", performer.Name, performer.ID) + + // Scrape Adult Empire data + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + // Set auth token if provided + if flag := cmd.Flag("etoken"); flag != nil { + if etoken := flag.Value.String(); etoken != "" { + if err := scraper.SetAuthToken(etoken); err != nil { + return fmt.Errorf("failed to set auth token: %w", err) + } + } + } + + fmt.Printf("Scraping Adult Empire data from: %s\n", adultempURL) + + adultempData, err := scraper.ScrapePerformerByURL(context.Background(), adultempURL) + if err != nil { + return fmt.Errorf("failed to scrape Adult Empire: %w", err) + } + + // Check if names match + if !merger.ShouldMerge(performer.Name, adultempData.Name) { + fmt.Printf("\n⚠ Warning: Names don't match closely:\n") + fmt.Printf(" Database: %s\n", performer.Name) + fmt.Printf(" Adult Empire: %s\n", adultempData.Name) + fmt.Printf("\nProceed anyway? This may merge data for different performers.\n") + fmt.Printf("Type 'yes' to continue: ") + + var response string + fmt.Scanln(&response) + if response != "yes" { + fmt.Println("Merge cancelled.") + return nil + } + } + + // Merge the data + fmt.Println("\n🔄 Merging data...") + mergedPerformer := merger.MergePerformerData(performer, adultempData) + + // Update the database + if err := performerStore.Update(mergedPerformer); err != nil { + return fmt.Errorf("failed to update performer: %w", err) + } + + fmt.Println("✓ Successfully merged Adult Empire data!") + fmt.Printf("\nUpdated fields:\n") + if mergedPerformer.Birthday != performer.Birthday && mergedPerformer.Birthday != "" { + fmt.Printf(" - Birthday: %s\n", mergedPerformer.Birthday) + } + if mergedPerformer.Ethnicity != performer.Ethnicity && mergedPerformer.Ethnicity != "" { + fmt.Printf(" - Ethnicity: %s\n", mergedPerformer.Ethnicity) + } + if mergedPerformer.Country != performer.Country && mergedPerformer.Country != "" { + fmt.Printf(" - Country: %s\n", mergedPerformer.Country) + } + if mergedPerformer.Height != performer.Height && mergedPerformer.Height > 0 { + fmt.Printf(" - Height: %d cm\n", mergedPerformer.Height) + } + if mergedPerformer.HairColor != performer.HairColor && mergedPerformer.HairColor != "" { + fmt.Printf(" - Hair Color: %s\n", mergedPerformer.HairColor) + } + if mergedPerformer.EyeColor != performer.EyeColor && mergedPerformer.EyeColor != "" { + fmt.Printf(" - Eye Color: %s\n", mergedPerformer.EyeColor) + } + if mergedPerformer.Aliases != performer.Aliases && mergedPerformer.Aliases != "" { + fmt.Printf(" - Aliases: %s\n", mergedPerformer.Aliases) + } + + return nil + }, +} + var importSceneCmd = &cobra.Command{ Use: "scene [query]", Short: "Search TPDB for scenes and import them to local database", @@ -624,3 +2261,573 @@ var importSceneCmd = &cobra.Command{ return nil }, } + +var importMovieCmd = &cobra.Command{ + Use: "movie [title or url]", + Short: "Import movies from Adult Empire", + Long: `Import movies from Adult Empire. TPDB does not have a movies database. + +Movies can be imported by: +1. Searching by title: ./goondex import movie "Movie Title" +2. Direct URL: ./goondex import movie https://www.adultdvdempire.com/... + +Note: For bulk movie import, movies are best imported through Adult Empire's catalog.`, + Args: cobra.MinimumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + input := args[0] + + fmt.Println("╔═══════════════════════════════════════════════════════════════╗") + fmt.Println("║ MOVIE IMPORT - ADULT EMPIRE ║") + fmt.Println("╚═══════════════════════════════════════════════════════════════╝") + fmt.Println() + fmt.Println("📌 Note: TPDB does not have movies. Movies are imported from Adult Empire.") + fmt.Println() + + // Check if input is a URL + isURL := len(input) > 4 && (input[:4] == "http" || input[:3] == "www") + + if isURL { + fmt.Printf("🔗 Importing movie from URL: %s\n", input) + fmt.Println() + fmt.Println("ℹ️ To import movies, use the Adult Empire scraper:") + fmt.Printf(" ./goondex adultemp scrape-movie %s\n", input) + } else { + fmt.Printf("🔍 Searching Adult Empire for: \"%s\"\n", input) + fmt.Println() + fmt.Println("ℹ️ To search and import movies, use:") + fmt.Printf(" 1. Search: ./goondex adultemp search-movie \"%s\"\n", input) + fmt.Println(" 2. Copy the URL of the movie you want") + fmt.Println(" 3. Import: ./goondex adultemp scrape-movie [url]") + } + + fmt.Println() + fmt.Println("💡 Tip: Movies imported from Adult Empire will include:") + fmt.Println(" • Movie title, date, studio, director") + fmt.Println(" • Front and back cover images") + fmt.Println(" • Full description") + fmt.Println(" • Cast (performers)") + fmt.Println(" • Individual scenes within the movie") + fmt.Println() + fmt.Println("For now, movies need to be imported manually. Bulk import") + fmt.Println("from Adult Empire is planned for a future release.") + + return nil + }, +} + +// Enrich command with subcommands +var enrichCmd = &cobra.Command{ + Use: "enrich", + Short: "Enrich existing data with Adult Empire metadata", + Long: `Automatically enrich performers and scenes with additional metadata from Adult Empire. + +This command searches Adult Empire for matching entities and merges the data: +- TPDB data is primary (never overwritten) +- Adult Empire fills in missing fields (bio, ethnicity, measurements, etc.) +- Fuzzy name matching ensures accurate matches (70% threshold) +- Rate limited to prevent API overload + +Examples: + goondex enrich performer 123 # Enrich single performer + goondex enrich all-performers # Enrich all performers + goondex enrich scene 456 # Enrich single scene + goondex enrich all-scenes # Enrich all scenes`, +} + +func init() { + enrichCmd.AddCommand(enrichPerformerCmd) + enrichCmd.AddCommand(enrichAllPerformersCmd) + enrichCmd.AddCommand(enrichSceneCmd) + enrichCmd.AddCommand(enrichAllScenesCmd) + + // Flags for bulk enrich + enrichAllPerformersCmd.Flags().Int("start-id", 1, "Performer ID to start from (for resuming)") + enrichAllPerformersCmd.Flags().Int("limit", 0, "Maximum performers to enrich (0 = all)") + enrichAllPerformersCmd.Flags().Duration("rate-limit", 500*time.Millisecond, "Delay between searches") + enrichAllScenesCmd.Flags().Int("start-id", 1, "Scene ID to start from (for resuming)") + enrichAllScenesCmd.Flags().Int("limit", 0, "Maximum scenes to enrich (0 = all)") + enrichAllScenesCmd.Flags().Duration("rate-limit", 500*time.Millisecond, "Delay between searches") +} + +var enrichPerformerCmd = &cobra.Command{ + Use: "performer [id]", + Short: "Enrich a single performer with Adult Empire data", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + id := args[0] + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + performerStore := db.NewPerformerStore(database) + + // Get performer from DB + var performerID int64 + if _, err := fmt.Sscanf(id, "%d", &performerID); err != nil { + return fmt.Errorf("invalid performer ID: %w", err) + } + + performer, err := performerStore.GetByID(performerID) + if err != nil { + return fmt.Errorf("performer not found: %w", err) + } + + fmt.Printf("🔍 Enriching performer: %s (ID: %d)\n", performer.Name, performer.ID) + + // Search Adult Empire + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + fmt.Printf(" Searching Adult Empire for '%s'...\n", performer.Name) + results, err := scraper.SearchPerformersByName(context.Background(), performer.Name) + if err != nil { + return fmt.Errorf("search failed: %w", err) + } + + if len(results) == 0 { + fmt.Println(" ⚠ No matches found on Adult Empire") + return nil + } + + // Find best match using fuzzy matching + var bestMatch *adultemp.SearchResult + for i := range results { + if merger.ShouldMerge(performer.Name, results[i].Title) { + bestMatch = &results[i] + break + } + } + + if bestMatch == nil { + fmt.Printf(" ⚠ No confident matches found (searched %d results)\n", len(results)) + fmt.Println(" 💡 Tip: First result was:", results[0].Title) + return nil + } + + fmt.Printf(" ✓ Found match: %s\n", bestMatch.Title) + fmt.Printf(" 📄 Scraping: %s\n", bestMatch.URL) + + // Scrape full data + adultempData, err := scraper.ScrapePerformerByURL(context.Background(), bestMatch.URL) + if err != nil { + return fmt.Errorf("scraping failed: %w", err) + } + + // Merge data + merged := merger.MergePerformerData(performer, adultempData) + + // Update in database + if err := performerStore.Update(merged); err != nil { + return fmt.Errorf("failed to update performer: %w", err) + } + + fmt.Printf(" ✓ Successfully enriched %s\n", performer.Name) + fmt.Println() + fmt.Println("📊 Enriched fields:") + if adultempData.Birthdate != "" { + fmt.Printf(" • Birthday: %s\n", adultempData.Birthdate) + } + if adultempData.Ethnicity != "" { + fmt.Printf(" • Ethnicity: %s\n", adultempData.Ethnicity) + } + if adultempData.HairColor != "" { + fmt.Printf(" • Hair Color: %s\n", adultempData.HairColor) + } + if adultempData.EyeColor != "" { + fmt.Printf(" • Eye Color: %s\n", adultempData.EyeColor) + } + if adultempData.Measurements != "" { + fmt.Printf(" • Measurements: %s\n", adultempData.Measurements) + } + + return nil + }, +} + +var enrichAllPerformersCmd = &cobra.Command{ + Use: "all-performers", + Short: "Enrich all performers with Adult Empire data", + Long: `Automatically enrich all performers in the database with Adult Empire metadata. + +This process: +- Searches Adult Empire for each performer by name +- Uses fuzzy matching to find confident matches +- Merges data only for high-confidence matches (70% name similarity) +- Rate limited to prevent API overload (default 500ms between searches) +- Tracks progress and can be resumed + +Examples: + goondex enrich all-performers # Enrich all performers + goondex enrich all-performers --start-id 100 # Resume from ID 100 + goondex enrich all-performers --limit 50 # Enrich first 50 performers + goondex enrich all-performers --rate-limit 1s # Slower rate limit`, + RunE: func(cmd *cobra.Command, args []string) error { + startID, _ := cmd.Flags().GetInt("start-id") + limit, _ := cmd.Flags().GetInt("limit") + rateLimit, _ := cmd.Flags().GetDuration("rate-limit") + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + performerStore := db.NewPerformerStore(database) + + // Get all performers + performers, err := performerStore.Search("") + if err != nil { + return fmt.Errorf("failed to search performers: %w", err) + } + + fmt.Printf("🔍 Enriching performers from Adult Empire\n") + fmt.Printf(" Total performers: %d\n", len(performers)) + fmt.Printf(" Start ID: %d\n", startID) + if limit > 0 { + fmt.Printf(" Limit: %d\n", limit) + } + fmt.Printf(" Rate limit: %v\n", rateLimit) + fmt.Println() + + // Create scraper + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + // Track stats + totalProcessed := 0 + totalEnriched := 0 + totalSkipped := 0 + totalFailed := 0 + + for _, performer := range performers { + // Skip if before start ID + if performer.ID < int64(startID) { + continue + } + + // Check limit + if limit > 0 && totalProcessed >= limit { + break + } + + totalProcessed++ + fmt.Printf("[%d/%d] Processing: %s (ID: %d)\n", totalProcessed, len(performers), performer.Name, performer.ID) + + // Search Adult Empire + results, err := scraper.SearchPerformersByName(context.Background(), performer.Name) + if err != nil { + fmt.Printf(" ⚠ Search failed: %v\n", err) + totalFailed++ + time.Sleep(rateLimit) + continue + } + + if len(results) == 0 { + fmt.Println(" ⚠ No matches found") + totalSkipped++ + time.Sleep(rateLimit) + continue + } + + // Find best match using fuzzy matching + var bestMatch *adultemp.SearchResult + for i := range results { + if merger.ShouldMerge(performer.Name, results[i].Title) { + bestMatch = &results[i] + break + } + } + + if bestMatch == nil { + fmt.Printf(" ⚠ No confident match (closest: %s)\n", results[0].Title) + totalSkipped++ + time.Sleep(rateLimit) + continue + } + + fmt.Printf(" ✓ Found: %s\n", bestMatch.Title) + + // Scrape full data + adultempData, err := scraper.ScrapePerformerByURL(context.Background(), bestMatch.URL) + if err != nil { + fmt.Printf(" ⚠ Scraping failed: %v\n", err) + totalFailed++ + time.Sleep(rateLimit) + continue + } + + // Merge data + merged := merger.MergePerformerData(&performer, adultempData) + + // Update in database + if err := performerStore.Update(merged); err != nil { + fmt.Printf(" ⚠ Update failed: %v\n", err) + totalFailed++ + time.Sleep(rateLimit) + continue + } + + fmt.Printf(" ✓ Enriched successfully\n") + totalEnriched++ + + // Rate limiting + time.Sleep(rateLimit) + } + + fmt.Println() + fmt.Println("═══════════════════════════════════════") + fmt.Printf("✓ Enrichment Complete\n") + fmt.Printf(" Processed: %d\n", totalProcessed) + fmt.Printf(" Enriched: %d\n", totalEnriched) + fmt.Printf(" Skipped: %d\n", totalSkipped) + fmt.Printf(" Failed: %d\n", totalFailed) + fmt.Println("═══════════════════════════════════════") + + return nil + }, +} + +var enrichSceneCmd = &cobra.Command{ + Use: "scene [id]", + Short: "Enrich a single scene with Adult Empire data", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + id := args[0] + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + sceneStore := db.NewSceneStore(database) + + // Get scene from DB + var sceneID int64 + if _, err := fmt.Sscanf(id, "%d", &sceneID); err != nil { + return fmt.Errorf("invalid scene ID: %w", err) + } + + scene, err := sceneStore.GetByID(sceneID) + if err != nil { + return fmt.Errorf("scene not found: %w", err) + } + + fmt.Printf("🔍 Enriching scene: %s (ID: %d)\n", scene.Title, scene.ID) + + // Search Adult Empire + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + fmt.Printf(" Searching Adult Empire for '%s'...\n", scene.Title) + results, err := scraper.SearchScenesByName(context.Background(), scene.Title) + if err != nil { + return fmt.Errorf("search failed: %w", err) + } + + if len(results) == 0 { + fmt.Println(" ⚠ No matches found on Adult Empire") + return nil + } + + // Find best match using fuzzy matching + var bestMatch *adultemp.SearchResult + for i := range results { + if merger.ShouldMerge(scene.Title, results[i].Title) { + bestMatch = &results[i] + break + } + } + + if bestMatch == nil { + fmt.Printf(" ⚠ No confident matches found (searched %d results)\n", len(results)) + fmt.Println(" 💡 Tip: First result was:", results[0].Title) + return nil + } + + fmt.Printf(" ✓ Found match: %s\n", bestMatch.Title) + fmt.Printf(" 📄 Scraping: %s\n", bestMatch.URL) + + // Scrape full data + adultempData, err := scraper.ScrapeSceneByURL(context.Background(), bestMatch.URL) + if err != nil { + return fmt.Errorf("scraping failed: %w", err) + } + + // Merge scene data (fill in missing fields) + if scene.Description == "" && adultempData.Description != "" { + scene.Description = adultempData.Description + } + if scene.Director == "" && adultempData.Director != "" { + scene.Director = adultempData.Director + } + if scene.ImageURL == "" && adultempData.Image != "" { + scene.ImageURL = adultempData.Image + } + + // Update in database + if err := sceneStore.Update(scene); err != nil { + return fmt.Errorf("failed to update scene: %w", err) + } + + fmt.Printf(" ✓ Successfully enriched %s\n", scene.Title) + + return nil + }, +} + +var enrichAllScenesCmd = &cobra.Command{ + Use: "all-scenes", + Short: "Enrich all scenes with Adult Empire data", + Long: `Automatically enrich all scenes in the database with Adult Empire metadata. + +This process: +- Searches Adult Empire for each scene by title +- Uses fuzzy matching to find confident matches +- Merges data only for high-confidence matches (70% title similarity) +- Rate limited to prevent API overload (default 500ms between searches) +- Tracks progress and can be resumed`, + RunE: func(cmd *cobra.Command, args []string) error { + startID, _ := cmd.Flags().GetInt("start-id") + limit, _ := cmd.Flags().GetInt("limit") + rateLimit, _ := cmd.Flags().GetDuration("rate-limit") + + database, err := getDB() + if err != nil { + return err + } + defer database.Close() + + sceneStore := db.NewSceneStore(database) + + // Get all scenes + scenes, err := sceneStore.Search("") + if err != nil { + return fmt.Errorf("failed to search scenes: %w", err) + } + + fmt.Printf("🔍 Enriching scenes from Adult Empire\n") + fmt.Printf(" Total scenes: %d\n", len(scenes)) + fmt.Printf(" Start ID: %d\n", startID) + if limit > 0 { + fmt.Printf(" Limit: %d\n", limit) + } + fmt.Printf(" Rate limit: %v\n", rateLimit) + fmt.Println() + + // Create scraper + scraper, err := adultemp.NewScraper() + if err != nil { + return fmt.Errorf("failed to create scraper: %w", err) + } + + // Track stats + totalProcessed := 0 + totalEnriched := 0 + totalSkipped := 0 + totalFailed := 0 + + for _, scene := range scenes { + // Skip if before start ID + if scene.ID < int64(startID) { + continue + } + + // Check limit + if limit > 0 && totalProcessed >= limit { + break + } + + totalProcessed++ + fmt.Printf("[%d/%d] Processing: %s (ID: %d)\n", totalProcessed, len(scenes), scene.Title, scene.ID) + + // Search Adult Empire + results, err := scraper.SearchScenesByName(context.Background(), scene.Title) + if err != nil { + fmt.Printf(" ⚠ Search failed: %v\n", err) + totalFailed++ + time.Sleep(rateLimit) + continue + } + + if len(results) == 0 { + fmt.Println(" ⚠ No matches found") + totalSkipped++ + time.Sleep(rateLimit) + continue + } + + // Find best match using fuzzy matching + var bestMatch *adultemp.SearchResult + for i := range results { + if merger.ShouldMerge(scene.Title, results[i].Title) { + bestMatch = &results[i] + break + } + } + + if bestMatch == nil { + fmt.Printf(" ⚠ No confident match (closest: %s)\n", results[0].Title) + totalSkipped++ + time.Sleep(rateLimit) + continue + } + + fmt.Printf(" ✓ Found: %s\n", bestMatch.Title) + + // Scrape full data + adultempData, err := scraper.ScrapeSceneByURL(context.Background(), bestMatch.URL) + if err != nil { + fmt.Printf(" ⚠ Scraping failed: %v\n", err) + totalFailed++ + time.Sleep(rateLimit) + continue + } + + // Merge scene data (fill in missing fields) + if scene.Description == "" && adultempData.Description != "" { + scene.Description = adultempData.Description + } + if scene.Director == "" && adultempData.Director != "" { + scene.Director = adultempData.Director + } + if scene.ImageURL == "" && adultempData.Image != "" { + scene.ImageURL = adultempData.Image + } + + // Update in database + if err := sceneStore.Update(&scene); err != nil { + fmt.Printf(" ⚠ Update failed: %v\n", err) + totalFailed++ + time.Sleep(rateLimit) + continue + } + + fmt.Printf(" ✓ Enriched successfully\n") + totalEnriched++ + + // Rate limiting + time.Sleep(rateLimit) + } + + fmt.Println() + fmt.Println("═══════════════════════════════════════") + fmt.Printf("✓ Enrichment Complete\n") + fmt.Printf(" Processed: %d\n", totalProcessed) + fmt.Printf(" Enriched: %d\n", totalEnriched) + fmt.Printf(" Skipped: %d\n", totalSkipped) + fmt.Printf(" Failed: %d\n", totalFailed) + fmt.Println("═══════════════════════════════════════") + + return nil + }, +} diff --git a/docs/ADULT_EMPIRE_SCRAPER.md b/docs/ADULT_EMPIRE_SCRAPER.md new file mode 100644 index 0000000..61da1a1 --- /dev/null +++ b/docs/ADULT_EMPIRE_SCRAPER.md @@ -0,0 +1,329 @@ +# Adult Empire Scraper Integration + +**Version**: v0.1.0-dev4 +**Last Updated**: 2025-11-16 + +## Overview + +Goondex now includes a full-featured Adult Empire scraper based on the Stash app's scraping architecture. This allows you to fetch metadata, cover art, and performer information directly from Adult Empire (adultdvdempire.com). + +## Features + +### ✅ Scene Scraping +- Extract scene title, description, release date +- Download cover art/thumbnails +- Retrieve studio information +- Get performer lists +- Extract tags/categories +- Scene code/SKU +- Director information + +### ✅ Performer Scraping +- Extract performer name, aliases +- Download profile images +- Retrieve birthdate, ethnicity, nationality +- Physical attributes (height, measurements, hair/eye color) +- Biography text + +### ✅ Search Functionality +- Search scenes by title +- Search performers by name +- Get search results with thumbnails + +## Architecture + +The Adult Empire scraper is implemented in `/internal/scraper/adultemp/` with the following components: + +### Files + +1. **`types.go`** - Data structures for scraped content +2. **`client.go`** - HTTP client with cookie/session management +3. **`xpath.go`** - XPath parsing utilities for HTML extraction +4. **`scraper.go`** - Main scraper implementation + +### Components + +``` +┌─────────────────┐ +│ Scraper API │ - ScrapeSceneByURL() +│ │ - ScrapePerformerByURL() +│ │ - SearchScenesByName() +│ │ - SearchPerformersByName() +└────────┬────────┘ + │ + ▼ +┌─────────────────┐ +│ HTTP Client │ - Cookie jar for sessions +│ │ - Age verification +│ │ - Auth token support +└────────┬────────┘ + │ + ▼ +┌─────────────────┐ +│ XPath Parser │ - Extract data from HTML +│ │ - Parse dates, heights +│ │ - Clean text content +└─────────────────┘ +``` + +## Usage + +### Authentication (Optional) + +For full access to Adult Empire content, you can set an authentication token: + +```go +scraper, err := adultemp.NewScraper() +if err != nil { + log.Fatal(err) +} + +// Optional: Set your Adult Empire session token +scraper.SetAuthToken("your-etoken-here") +``` + +**Getting your etoken:** +1. Log into adultdvdempire.com +2. Open browser DevTools (F12) +3. Go to Application → Cookies → adultdvdempire.com +4. Copy the value of the `etoken` cookie + +### Scrape a Scene by URL + +```go +ctx := context.Background() +sceneData, err := scraper.ScrapeSceneByURL(ctx, "https://www.adultdvdempire.com/12345/scene-name") +if err != nil { + log.Fatal(err) +} + +// Convert to Goondex model +scene := scraper.ConvertSceneToModel(sceneData) + +// Save to database +// db.Scenes.Create(scene) +``` + +### Search for Scenes + +```go +results, err := scraper.SearchScenesByName(ctx, "scene title") +if err != nil { + log.Fatal(err) +} + +for _, result := range results { + fmt.Printf("Title: %s\n", result.Title) + fmt.Printf("URL: %s\n", result.URL) + fmt.Printf("Image: %s\n", result.Image) +} +``` + +### Scrape a Performer + +```go +performerData, err := scraper.ScrapePerformerByURL(ctx, "https://www.adultdvdempire.com/performer/12345/name") +if err != nil { + log.Fatal(err) +} + +// Convert to Goondex model +performer := scraper.ConvertPerformerToModel(performerData) +``` + +### Search for Performers + +```go +results, err := scraper.SearchPerformersByName(ctx, "performer name") +if err != nil { + log.Fatal(err) +} + +for _, result := range results { + fmt.Printf("Name: %s\n", result.Title) + fmt.Printf("URL: %s\n", result.URL) +} +``` + +## Data Structures + +### SceneData + +```go +type SceneData struct { + Title string // Scene title + URL string // Adult Empire URL + Date string // Release date + Studio string // Studio name + Image string // Cover image URL + Description string // Synopsis/description + Performers []string // List of performer names + Tags []string // Categories/tags + Code string // Scene code/SKU + Director string // Director name +} +``` + +### PerformerData + +```go +type PerformerData struct { + Name string // Performer name + URL string // Adult Empire URL + Image string // Profile image URL + Birthdate string // Date of birth + Ethnicity string // Ethnicity + Country string // Country of origin + Height string // Height (converted to cm) + Measurements string // Body measurements + HairColor string // Hair color + EyeColor string // Eye color + Biography string // Bio text + Aliases []string // Alternative names +} +``` + +## XPath Selectors + +The scraper uses XPath to extract data from Adult Empire pages. Key selectors include: + +### Scene Selectors +- **Title**: `//h1[@class='title']` +- **Date**: `//div[@class='release-date']/text()` +- **Studio**: `//a[contains(@href, '/studio/')]/text()` +- **Image**: `//div[@class='item-image']//img/@src` +- **Description**: `//div[@class='synopsis']` +- **Performers**: `//a[contains(@href, '/performer/')]/text()` +- **Tags**: `//a[contains(@href, '/category/')]/text()` + +### Performer Selectors +- **Name**: `//h1[@class='performer-name']` +- **Image**: `//div[@class='performer-image']//img/@src` +- **Birthdate**: `//span[@class='birthdate']/text()` +- **Height**: `//span[@class='height']/text()` +- **Bio**: `//div[@class='bio']` + +**Note**: Adult Empire may change their HTML structure. If scraping fails, XPath selectors in `scraper.go` may need updates. + +## Utilities + +### Date Parsing + +```go +dateStr := ParseDate("Jan 15, 2024") // Handles various formats +``` + +### Height Conversion + +```go +heightCm := ParseHeight("5'6\"") // Converts feet/inches to cm (168) +``` + +### Text Cleaning + +```go +cleanedText := CleanText(rawHTML) // Removes "Show More/Less" and extra whitespace +``` + +### URL Normalization + +```go +fullURL := ExtractURL("/path/to/scene", "https://www.adultdvdempire.com") +// Returns: "https://www.adultdvdempire.com/path/to/scene" +``` + +## Integration with Goondex + +The Adult Empire scraper integrates seamlessly with the existing Goondex architecture: + +1. **Scrape** data from Adult Empire using the scraper +2. **Convert** to Goondex models using converter functions +3. **Save** to the database using existing stores +4. **Display** in the web UI with cover art and metadata + +### Example Workflow + +```go +// 1. Search for a scene +results, _ := scraper.SearchScenesByName(ctx, "scene name") + +// 2. Pick the first result and scrape full details +sceneData, _ := scraper.ScrapeSceneByURL(ctx, results[0].URL) + +// 3. Convert to Goondex model +scene := scraper.ConvertSceneToModel(sceneData) + +// 4. Save to database +sceneStore := db.NewSceneStore(database) +sceneStore.Create(scene) + +// 5. Now it appears in the web UI! +``` + +## Future Enhancements + +Planned improvements for the Adult Empire scraper: + +- ⏳ **Bulk Import** - Import entire studios or series +- ⏳ **Auto-Update** - Periodically refresh metadata +- ⏳ **Image Caching** - Download and cache cover art locally +- ⏳ **Duplicate Detection** - Avoid importing the same scene twice +- ⏳ **Advanced Search** - Filter by studio, date range, tags +- ⏳ **Web UI Integration** - Search and import from the dashboard + +## Troubleshooting + +### "Failed to parse HTML" +- The Adult Empire page structure may have changed +- Update XPath selectors in `scraper.go` + +### "Request failed: 403 Forbidden" +- You may need to set an auth token +- Adult Empire may be blocking automated requests +- Try setting a valid `etoken` cookie + +### "No results found" +- Check that the search query is correct +- Adult Empire search may have different spelling +- Try broader search terms + +### Scene/Performer data incomplete +- Some fields may not be present on all pages +- XPath selectors may need adjustment +- Check the raw HTML to verify field availability + +## Comparison with TPDB Scraper + +| Feature | TPDB | Adult Empire | +|---------|------|--------------| +| **API** | ✅ Official JSON API | ❌ HTML scraping | +| **Auth** | ✅ API key | ⚠️ Session cookie | +| **Rate Limits** | ✅ Documented | ⚠️ Unknown | +| **Stability** | ✅ Stable schema | ⚠️ May change | +| **Coverage** | ✅ Comprehensive | ✅ Comprehensive | +| **Images** | ✅ High quality | ✅ High quality | + +**Recommendation**: Use TPDB as the primary source and Adult Empire as a fallback or supplemental source. + +## Contributing + +To improve Adult Empire scraping: + +1. Update XPath selectors if Adult Empire changes their HTML +2. Add support for additional fields +3. Improve date/height parsing +4. Add more robust error handling + +## Version History + +- **v0.1.0-dev4** (2025-11-16): Initial Adult Empire scraper implementation + - HTTP client with cookie support + - XPath parsing utilities + - Scene and performer scraping + - Search functionality + - Model conversion utilities + +--- + +**Last Updated**: 2025-11-16 +**Maintainer**: Goondex Team diff --git a/docs/API_QUICK_REFERENCE.md b/docs/API_QUICK_REFERENCE.md new file mode 100644 index 0000000..2cccf45 --- /dev/null +++ b/docs/API_QUICK_REFERENCE.md @@ -0,0 +1,204 @@ +# Goondex API Quick Reference + +**Quick lookup for all API endpoints** + +## Base URL +``` +http://localhost:8080 +``` + +--- + +## Search & Import APIs + +| Method | Endpoint | Description | Request Body | +|--------|----------|-------------|--------------| +| `POST` | `/api/import/performer` | Import performer by name search | `{"query": "name"}` | +| `POST` | `/api/import/studio` | Import studio by name search | `{"query": "name"}` | +| `POST` | `/api/import/scene` | Import scene by title search | `{"query": "title"}` | + +--- + +## Bulk Import APIs + +| Method | Endpoint | Description | Request Body | +|--------|----------|-------------|--------------| +| `POST` | `/api/import/all` | Import all data (performers, studios, scenes) | None | +| `POST` | `/api/import/all-performers` | Import all performers from database | None | +| `POST` | `/api/import/all-studios` | Import all studios from database | None | +| `POST` | `/api/import/all-scenes` | Import all scenes from database | None | + +--- + +## Bulk Import with Real-time Progress (SSE) + +| Method | Endpoint | Description | Returns | +|--------|----------|-------------|---------| +| `GET` | `/api/import/all-performers/progress` | Import performers with SSE updates | Event stream | +| `GET` | `/api/import/all-studios/progress` | Import studios with SSE updates | Event stream | +| `GET` | `/api/import/all-scenes/progress` | Import scenes with SSE updates | Event stream | + +--- + +## Sync APIs + +| Method | Endpoint | Description | Request Body | +|--------|----------|-------------|--------------| +| `POST` | `/api/sync` | Sync all data with TPDB | `{"force": true/false}` (optional) | +| `GET` | `/api/sync/status` | Get last sync timestamp for all entities | None | + +--- + +## Search API + +| Method | Endpoint | Description | Query Params | +|--------|----------|-------------|--------------| +| `GET` | `/api/search` | Global search across all entities | `?q=search_term` | + +**Response:** +```javascript +{ + "success": true, + "message": "Found 25 results", + "data": { + "performers": [...], + "studios": [...], + "scenes": [...], + "tags": [...], + "total": 25 + } +} +``` + +--- + +## Quick Examples + +### Import a Performer +```javascript +fetch('http://localhost:8080/api/import/performer', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: 'Jane Doe' }) +}) +.then(res => res.json()) +.then(data => console.log(data)); +``` + +### Global Search +```javascript +fetch('http://localhost:8080/api/search?q=search_term') + .then(res => res.json()) + .then(data => console.log(data.data)); +``` + +### Bulk Import with Progress +```javascript +const eventSource = new EventSource( + 'http://localhost:8080/api/import/all-performers/progress' +); + +eventSource.onmessage = (event) => { + const update = JSON.parse(event.data); + console.log(update); + + if (update.complete) { + eventSource.close(); + } +}; +``` + +### Sync Data +```javascript +fetch('http://localhost:8080/api/sync', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ force: false }) +}) +.then(res => res.json()) +.then(data => console.log(data)); +``` + +--- + +## Standard Response Format + +All endpoints return JSON in this format: + +```javascript +{ + "success": true, // boolean + "message": "...", // string + "data": { ... } // object (optional) +} +``` + +**Success Response:** +```javascript +{ + "success": true, + "message": "Imported 5 performer(s)", + "data": { "imported": 5, "found": 5 } +} +``` + +**Error Response:** +```javascript +{ + "success": false, + "message": "TPDB_API_KEY not configured" +} +``` + +--- + +## Common HTTP Status Codes + +| Code | Meaning | +|------|---------| +| `200` | Success | +| `400` | Bad Request (invalid data) | +| `404` | Not Found | +| `405` | Method Not Allowed (e.g., GET instead of POST) | +| `500` | Internal Server Error | + +--- + +## HTML/Page Routes (for reference) + +These serve HTML pages, not JSON: + +| Route | Description | +|-------|-------------| +| `/` | Dashboard | +| `/performers` | Performer list | +| `/performers/{id}` | Performer detail | +| `/studios` | Studio list | +| `/studios/{id}` | Studio detail | +| `/scenes` | Scene list | +| `/scenes/{id}` | Scene detail | +| `/movies` | Movie list | +| `/movies/{id}` | Movie detail | + +Query parameters for lists: +- `?q=search_term` - Search filter +- `?nationality=US` - Filter performers by nationality +- `?gender=female` - Filter performers by gender + +--- + +## Environment Setup + +Make sure the backend is configured with: + +```bash +export TPDB_API_KEY="your-api-key-here" +``` + +Without this, import and sync endpoints will fail with: +```javascript +{ + "success": false, + "message": "TPDB_API_KEY not configured" +} +``` diff --git a/docs/COLOR_SCHEME.md b/docs/COLOR_SCHEME.md new file mode 100644 index 0000000..0356cd5 --- /dev/null +++ b/docs/COLOR_SCHEME.md @@ -0,0 +1,277 @@ +# Goondex Color Scheme + +## Overview + +Goondex uses a carefully curated dark mode color palette centered around **Flamingo Pulse Pink** (#FF4FA3) as the primary brand color. This bold, vibrant aesthetic creates a modern, energetic interface while maintaining excellent readability and visual hierarchy. + +## Color Palette + +### Primary Colors + +| Color Name | Hex Code | RGB Values | Usage | +|------------|----------|------------|-------| +| **Flamingo Pulse Pink** | `#FF4FA3` | `rgb(255, 79, 163)` | Primary brand color, buttons, accents, links | +| **Hot Pink** | `#FF66C4` | `rgb(255, 102, 196)` | Data keypoints, hover states, highlights | +| **Lilac Tint** | `#D78BE0` | `rgb(215, 139, 224)` | Section headers, secondary accents | + +### Text Colors + +| Color Name | Hex Code | RGB Values | Usage | +|------------|----------|------------|-------| +| **Soft White** | `#F8F8F8` | `rgb(248, 248, 248)` | Primary text, headings | +| **Muted Grey** | `#9BA0A8` | `rgb(155, 160, 168)` | Secondary text, descriptions, labels | + +### Background Colors + +| Color Name | Hex Code | RGB Values | Usage | +|------------|----------|------------|-------| +| **Deep Black** | `#09090b` | `rgb(9, 9, 11)` | Main background | +| **Card Dark** | `#18181b` | `rgb(24, 24, 27)` | Card backgrounds, navbar | +| **Elevated Dark** | `#27272a` | `rgb(39, 39, 42)` | Elevated elements, inputs, hover states | +| **Border Grey** | `#3f3f46` | `rgb(63, 63, 70)` | Borders, dividers | + +### Status & Utility Colors + +| Color Name | Hex Code | RGB Values | Usage | +|------------|----------|------------|-------| +| **Cool Cyan** | `#7EE7E7` | `rgb(126, 231, 231)` | Info badges, dates, metadata | +| **Peach Warning** | `#FFAA88` | `rgb(255, 170, 136)` | Warnings, alerts, errors | + +## Complete Color Reference + +### All Colors with Full Details + +``` +BRAND COLORS +============ +Flamingo Pulse Pink + Hex: #FF4FA3 + RGB: rgb(255, 79, 163) + RGBA: rgba(255, 79, 163, 1.0) + HSL: hsl(331, 100%, 65%) + +Hot Pink + Hex: #FF66C4 + RGB: rgb(255, 102, 196) + RGBA: rgba(255, 102, 196, 1.0) + HSL: hsl(323, 100%, 70%) + +Lilac Tint + Hex: #D78BE0 + RGB: rgb(215, 139, 224) + RGBA: rgba(215, 139, 224, 1.0) + HSL: hsl(294, 57%, 71%) + +TEXT COLORS +=========== +Soft White + Hex: #F8F8F8 + RGB: rgb(248, 248, 248) + RGBA: rgba(248, 248, 248, 1.0) + HSL: hsl(0, 0%, 97%) + +Muted Grey + Hex: #9BA0A8 + RGB: rgb(155, 160, 168) + RGBA: rgba(155, 160, 168, 1.0) + HSL: hsl(217, 7%, 63%) + +BACKGROUND COLORS +================= +Deep Black + Hex: #09090b + RGB: rgb(9, 9, 11) + RGBA: rgba(9, 9, 11, 1.0) + HSL: hsl(240, 10%, 4%) + +Card Dark + Hex: #18181b + RGB: rgb(24, 24, 27) + RGBA: rgba(24, 24, 27, 1.0) + HSL: hsl(240, 6%, 10%) + +Elevated Dark + Hex: #27272a + RGB: rgb(39, 39, 42) + RGBA: rgba(39, 39, 42, 1.0) + HSL: hsl(240, 4%, 16%) + +Border Grey + Hex: #3f3f46 + RGB: rgb(63, 63, 70) + RGBA: rgba(63, 63, 70, 1.0) + HSL: hsl(240, 5%, 26%) + +STATUS COLORS +============= +Cool Cyan + Hex: #7EE7E7 + RGB: rgb(126, 231, 231) + RGBA: rgba(126, 231, 231, 1.0) + HSL: hsl(180, 70%, 70%) + +Peach Warning + Hex: #FFAA88 + RGB: rgb(255, 170, 136) + RGBA: rgba(255, 170, 136, 1.0) + HSL: hsl(17, 100%, 77%) +``` + +## CSS Variables + +The color scheme is implemented using CSS custom properties for easy theming and consistency: + +```css +:root { + /* Brand Colors */ + --color-brand: #FF4FA3; /* rgb(255, 79, 163) */ + --color-brand-hover: #FF66C4; /* rgb(255, 102, 196) */ + --color-keypoint: #FF66C4; /* rgb(255, 102, 196) */ + --color-header: #D78BE0; /* rgb(215, 139, 224) */ + + /* Text Colors */ + --color-text-primary: #F8F8F8; /* rgb(248, 248, 248) */ + --color-text-secondary: #9BA0A8; /* rgb(155, 160, 168) */ + + /* Background Colors */ + --color-bg-dark: #09090b; /* rgb(9, 9, 11) */ + --color-bg-card: #18181b; /* rgb(24, 24, 27) */ + --color-bg-elevated: #27272a; /* rgb(39, 39, 42) */ + --color-border: #3f3f46; /* rgb(63, 63, 70) */ + + /* Status Colors */ + --color-info: #7EE7E7; /* rgb(126, 231, 231) */ + --color-warning: #FFAA88; /* rgb(255, 170, 136) */ +} +``` + +## Usage Guidelines + +### Buttons & Interactive Elements + +**Primary Action Buttons:** +- Background: Linear gradient from `#FF4FA3` to `#FF66C4` +- Glow effect: `box-shadow: 0 2px 8px rgba(255, 79, 163, 0.3)` +- Hover: Brightness increase and stronger glow + +**Secondary Buttons:** +- Border: `2px solid #FF4FA3` +- Background: Transparent +- Hover: Background `rgba(255, 79, 163, 0.1)` + +### Text Hierarchy + +1. **Page Headings (h1, h2):** `#F8F8F8` (Soft White) +2. **Section Headers (h3):** `#D78BE0` (Lilac Tint) +3. **Body Text:** `#F8F8F8` (Soft White) +4. **Labels & Descriptions:** `#9BA0A8` (Muted Grey) +5. **Links:** `#FF4FA3` (Flamingo Pulse Pink) → `#FF66C4` (Hot Pink) on hover + +### Cards & Containers + +```css +background: var(--color-bg-card); +border: 1px solid var(--color-border); +box-shadow: 0 2px 8px rgba(255, 79, 163, 0.1); +``` + +### Tags & Badges + +```css +background: rgba(255, 79, 163, 0.15); +color: var(--color-brand); +border: 1px solid rgba(255, 79, 163, 0.3); +``` + +### Progress Bars + +```css +background: linear-gradient(135deg, #FF4FA3 0%, #FF66C4 100%); +box-shadow: 0 0 10px rgba(255, 79, 163, 0.5); +``` + +## Accessibility Considerations + +### Contrast Ratios + +All text colors have been chosen to meet WCAG AA standards for contrast against their backgrounds: + +- **White text (#F8F8F8) on Dark background (#09090b):** 17.8:1 ✓ +- **Pink links (#FF4FA3) on Dark background (#09090b):** 6.2:1 ✓ +- **Grey text (#9BA0A8) on Dark background (#09090b):** 7.5:1 ✓ + +### Color Blindness + +The pink/purple palette maintains good visibility for most forms of color blindness: +- **Protanopia (red-blind):** Pink appears more muted but still distinct +- **Deuteranopia (green-blind):** Minimal impact, colors remain vibrant +- **Tritanopia (blue-blind):** Pink shifts slightly warmer but remains distinct + +## Gradients + +Goondex uses gradients sparingly for emphasis on interactive elements: + +### Primary Gradient +```css +background: linear-gradient(135deg, #FF4FA3 0%, #FF66C4 100%); +``` +**Used for:** Buttons, progress bars, active states + +### Header Gradient +```css +background: linear-gradient(135deg, #FF4FA3 0%, #D78BE0 100%); +``` +**Used for:** Page headers, feature highlights + +## Effects + +### Glow Effects + +Pink glow for emphasis: +```css +box-shadow: 0 0 10px rgba(255, 79, 163, 0.5); +``` + +### Hover Effects + +Subtle brightness increase: +```css +filter: brightness(1.1); +``` + +### Focus States + +Pink outline for keyboard navigation: +```css +outline: 2px solid var(--color-brand); +outline-offset: 2px; +``` + +## Dark Mode + +Goondex is **dark mode by default**. The deep black background (#09090b) provides: +- Reduced eye strain in low-light conditions +- Better OLED screen efficiency +- Enhanced focus on content +- Modern, sleek aesthetic + +The pink accent creates strong visual contrast against the dark background, making interactive elements immediately identifiable. + +## Brand Identity + +The Flamingo Pulse Pink color scheme reflects: +- **Energy & Vibrancy:** Bold pink conveys excitement and engagement +- **Modernity:** Dark mode with neon accents is contemporary and tech-forward +- **Sophistication:** Lilac and muted greys add refinement +- **Approachability:** Pink is warm and inviting despite the dark theme + +## Version History + +- **v0.3.5-r1:** Original color scheme established +- **v0.1.0-dev1:** Initial implementation +- **v0.1.0-dev2:** Full TPDB integration maintaining brand colors +- **v0.1.0-dev3:** Enhanced with progress bars and glow effects + +--- + +**Last Updated:** 2025-11-15 diff --git a/docs/FRONTEND_API_GUIDE.md b/docs/FRONTEND_API_GUIDE.md new file mode 100644 index 0000000..baf8384 --- /dev/null +++ b/docs/FRONTEND_API_GUIDE.md @@ -0,0 +1,978 @@ +# Goondex Frontend API Guide + +**For Frontend Developers** + +This guide explains how to interact with the Goondex API using JavaScript. No backend knowledge required - just JavaScript, HTML, CSS, and Bootstrap skills! + +## Table of Contents + +1. [Getting Started](#getting-started) +2. [Base URL](#base-url) +3. [Data Models](#data-models) +4. [API Endpoints](#api-endpoints) +5. [Common Workflows](#common-workflows) +6. [Error Handling](#error-handling) +7. [Real-time Progress Updates](#real-time-progress-updates) +8. [Complete Examples](#complete-examples) + +--- + +## Getting Started + +All API endpoints return JSON data. You can use the `fetch` API to make requests from your JavaScript code. + +### Basic API Response Format + +All API responses follow this structure: + +```javascript +{ + "success": true, // boolean - whether the operation succeeded + "message": "Success text", // string - human-readable message + "data": { ... } // object - actual data (optional) +} +``` + +--- + +## Base URL + +The API server runs at: `http://localhost:8080` (by default) + +All endpoints are prefixed with this URL. + +--- + +## Data Models + +### Performer + +```javascript +{ + "id": 123, + "name": "Performer Name", + "aliases": "Alias1, Alias2", + + // Physical Attributes + "gender": "female", // male/female/trans/other + "birthday": "1995-03-15", // YYYY-MM-DD + "astrology": "Pisces", + "birthplace": "Los Angeles, CA", + "ethnicity": "Caucasian", + "nationality": "US", // ISO country code (US, GB, FR, etc.) + "country": "United States", + "eye_color": "Blue", + "hair_color": "Blonde", + "height": 165, // centimeters + "weight": 55, // kilograms + "measurements": "34C-24-36", + "cup_size": "34C", + "tattoo_description": "Dragon on left shoulder", + "piercing_description": "Nose piercing", + "boob_job": "False", // "True" or "False" as string + + // Career + "career": "2015-2023", + "career_start_year": 2015, + "career_end_year": 2023, + "date_of_death": "", // YYYY-MM-DD if applicable + "active": true, + + // Media + "image_path": "/path/to/image.jpg", + "image_url": "https://example.com/image.jpg", + "poster_url": "https://example.com/poster.jpg", + "bio": "Biography text...", + + // Metadata + "source": "tpdb", + "source_id": "abc-123-def", + "source_numeric_id": 456, + "created_at": "2024-01-01T12:00:00Z", + "updated_at": "2024-01-02T12:00:00Z" +} +``` + +### Studio + +```javascript +{ + "id": 456, + "name": "Studio Name", + "parent_id": 123, // null if no parent studio + "image_path": "/path/to/logo.jpg", + "image_url": "https://example.com/logo.jpg", + "description": "Studio description...", + "source": "tpdb", + "source_id": "xyz-789", + "created_at": "2024-01-01T12:00:00Z", + "updated_at": "2024-01-02T12:00:00Z" +} +``` + +### Scene + +```javascript +{ + "id": 789, + "title": "Scene Title", + "code": "SCENE-001", // DVD code or scene identifier + "date": "2024-01-15", // Release date YYYY-MM-DD + "studio_id": 456, + "description": "Scene description...", + "image_path": "/path/to/thumbnail.jpg", + "image_url": "https://example.com/thumbnail.jpg", + "director": "Director Name", + "url": "https://example.com/scene", + "source": "tpdb", + "source_id": "scene-123", + "created_at": "2024-01-01T12:00:00Z", + "updated_at": "2024-01-02T12:00:00Z", + + // Relationships (when populated) + "performers": [ /* array of Performer objects */ ], + "tags": [ /* array of Tag objects */ ], + "studio": { /* Studio object */ } +} +``` + +### Movie + +```javascript +{ + "id": 321, + "title": "Movie Title", + "date": "2024-01-01", // Release date + "studio_id": 456, + "description": "Movie description...", + "director": "Director Name", + "duration": 120, // Duration in minutes + "image_path": "/path/to/cover.jpg", + "image_url": "https://example.com/cover.jpg", + "back_image_url": "https://example.com/back-cover.jpg", + "url": "https://example.com/movie", + "source": "tpdb", + "source_id": "movie-456", + "created_at": "2024-01-01T12:00:00Z", + "updated_at": "2024-01-02T12:00:00Z", + + // Relationships (when populated) + "scenes": [ /* array of Scene objects */ ], + "performers": [ /* array of Performer objects */ ], + "tags": [ /* array of Tag objects */ ], + "studio": { /* Studio object */ } +} +``` + +### Tag + +```javascript +{ + "id": 111, + "name": "Tag Name", + "category_id": 5, + "aliases": "Alias1, Alias2", + "description": "Tag description...", + "source": "tpdb", + "source_id": "tag-789", + "created_at": "2024-01-01T12:00:00Z", + "updated_at": "2024-01-02T12:00:00Z" +} +``` + +--- + +## API Endpoints + +### Search & Import + +#### 1. Import Performer by Search + +**Endpoint:** `POST /api/import/performer` + +**Description:** Search for a performer and import all matching results from TPDB. + +**Request Body:** +```javascript +{ + "query": "performer name" +} +``` + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/import/performer', { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + query: 'Jane Doe' + }) +}); + +const result = await response.json(); +// result = { +// "success": true, +// "message": "Imported 5 performer(s)", +// "data": { "imported": 5, "found": 5 } +// } +``` + +#### 2. Import Studio by Search + +**Endpoint:** `POST /api/import/studio` + +**Request Body:** +```javascript +{ + "query": "studio name" +} +``` + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/import/studio', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: 'Brazzers' }) +}); + +const result = await response.json(); +``` + +#### 3. Import Scene by Search + +**Endpoint:** `POST /api/import/scene` + +**Description:** Search for a scene and import all matching results. This also imports associated performers, studio, and tags. + +**Request Body:** +```javascript +{ + "query": "scene title" +} +``` + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/import/scene', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: 'Scene Title' }) +}); + +const result = await response.json(); +``` + +### Bulk Import + +#### 4. Bulk Import All + +**Endpoint:** `POST /api/import/all` + +**Description:** Import all performers, studios, and scenes from your local database. This fetches full metadata from TPDB. + +**No Request Body Required** + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/import/all', { + method: 'POST' +}); + +const result = await response.json(); +// result.data contains import statistics +``` + +#### 5. Bulk Import All Performers + +**Endpoint:** `POST /api/import/all-performers` + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/import/all-performers', { + method: 'POST' +}); + +const result = await response.json(); +// result = { +// "success": true, +// "message": "Imported 150/200 performers", +// "data": { +// "total": 200, +// "imported": 150, +// "skipped": 50, +// "errors": 0 +// } +// } +``` + +#### 6. Bulk Import All Studios + +**Endpoint:** `POST /api/import/all-studios` + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/import/all-studios', { + method: 'POST' +}); + +const result = await response.json(); +``` + +#### 7. Bulk Import All Scenes + +**Endpoint:** `POST /api/import/all-scenes` + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/import/all-scenes', { + method: 'POST' +}); + +const result = await response.json(); +``` + +### Sync + +#### 8. Sync All Data + +**Endpoint:** `POST /api/sync` + +**Description:** Synchronize all data with TPDB to get the latest updates. + +**Request Body (Optional):** +```javascript +{ + "force": false // Set to true to force sync even if recently synced +} +``` + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/sync', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ force: true }) +}); + +const result = await response.json(); +``` + +#### 9. Get Sync Status + +**Endpoint:** `GET /api/sync/status` + +**Description:** Get the last sync time for all entities. + +**Example:** +```javascript +const response = await fetch('http://localhost:8080/api/sync/status'); +const result = await response.json(); +// result.data contains sync status for each entity type +``` + +### Global Search + +#### 10. Search Everything + +**Endpoint:** `GET /api/search?q=query` + +**Description:** Search across performers, studios, scenes, and tags simultaneously. + +**Example:** +```javascript +const query = 'search term'; +const response = await fetch(`http://localhost:8080/api/search?q=${encodeURIComponent(query)}`); +const result = await response.json(); + +// result.data = { +// "performers": [...], +// "studios": [...], +// "scenes": [...], +// "tags": [...], +// "total": 25 +// } +``` + +--- + +## Real-time Progress Updates + +For bulk imports, there are special endpoints that provide real-time progress updates using Server-Sent Events (SSE). + +### Bulk Import with Progress + +These endpoints stream progress updates as the import happens: + +- `GET /api/import/all-performers/progress` +- `GET /api/import/all-studios/progress` +- `GET /api/import/all-scenes/progress` + +**Example with EventSource:** + +```javascript +// Create an EventSource to listen for progress updates +const eventSource = new EventSource('http://localhost:8080/api/import/all-performers/progress'); + +eventSource.onmessage = function(event) { + const update = JSON.parse(event.data); + + if (update.error) { + console.error('Import error:', update.error); + eventSource.close(); + return; + } + + if (update.complete) { + console.log('Import complete!', update.result); + eventSource.close(); + return; + } + + // Progress update + console.log(`Progress: ${update.current}/${update.total}`); + console.log(`Current item: ${update.name}`); + console.log(`Status: ${update.status}`); + + // Update UI + updateProgressBar(update.current, update.total); +}; + +eventSource.onerror = function(error) { + console.error('EventSource error:', error); + eventSource.close(); +}; +``` + +**Progress Update Format:** + +```javascript +{ + "current": 15, // Current item number + "total": 100, // Total items to process + "name": "Jane Doe", // Name of current item being processed + "status": "importing" // Status message +} +``` + +**Completion Format:** + +```javascript +{ + "complete": true, + "result": { + "total": 100, + "imported": 95, + "skipped": 4, + "errors": 1 + } +} +``` + +--- + +## Common Workflows + +### 1. Search and Display Performers + +```javascript +// Search for performers +async function searchPerformers(searchQuery) { + const response = await fetch( + `http://localhost:8080/api/search?q=${encodeURIComponent(searchQuery)}` + ); + const result = await response.json(); + + if (result.success) { + return result.data.performers; + } + throw new Error(result.message); +} + +// Display in HTML +async function displayPerformers() { + const performers = await searchPerformers('jane'); + + const container = document.getElementById('performers-list'); + container.innerHTML = performers.map(p => ` +
+ ${p.name} +
+
${p.name}
+

+ ${p.nationality ? getFlagEmoji(p.nationality) : ''} + ${p.gender || 'Unknown'} +

+ View Details +
+
+ `).join(''); +} + +// Helper: Convert country code to flag emoji +function getFlagEmoji(countryCode) { + const codePoints = countryCode + .toUpperCase() + .split('') + .map(char => 127397 + char.charCodeAt()); + return String.fromCodePoint(...codePoints); +} +``` + +### 2. Import Data with Progress Bar + +```javascript +async function importPerformersWithProgress() { + const progressBar = document.getElementById('progress-bar'); + const statusText = document.getElementById('status-text'); + + const eventSource = new EventSource( + 'http://localhost:8080/api/import/all-performers/progress' + ); + + eventSource.onmessage = function(event) { + const update = JSON.parse(event.data); + + if (update.error) { + statusText.textContent = `Error: ${update.error}`; + progressBar.style.width = '100%'; + progressBar.classList.add('bg-danger'); + eventSource.close(); + return; + } + + if (update.complete) { + statusText.textContent = + `Complete! Imported ${update.result.imported}/${update.result.total}`; + progressBar.style.width = '100%'; + progressBar.classList.add('bg-success'); + eventSource.close(); + return; + } + + // Update progress + const percent = (update.current / update.total) * 100; + progressBar.style.width = `${percent}%`; + statusText.textContent = + `Importing ${update.name} (${update.current}/${update.total})`; + }; + + eventSource.onerror = function(error) { + statusText.textContent = 'Connection error'; + progressBar.classList.add('bg-danger'); + eventSource.close(); + }; +} +``` + +### 3. Sync Data + +```javascript +async function syncAllData(forceSync = false) { + const response = await fetch('http://localhost:8080/api/sync', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ force: forceSync }) + }); + + const result = await response.json(); + + if (result.success) { + console.log('Sync completed:', result.data); + return result.data; + } + throw new Error(result.message); +} + +// Check last sync status +async function checkSyncStatus() { + const response = await fetch('http://localhost:8080/api/sync/status'); + const result = await response.json(); + + if (result.success) { + console.log('Last sync times:', result.data); + return result.data; + } + throw new Error(result.message); +} +``` + +### 4. Search and Import New Performer + +```javascript +async function importNewPerformer(performerName) { + const response = await fetch('http://localhost:8080/api/import/performer', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: performerName }) + }); + + const result = await response.json(); + + if (result.success) { + alert(`Successfully imported ${result.data.imported} performer(s)`); + return result.data; + } + alert(`Failed: ${result.message}`); + throw new Error(result.message); +} + +// Usage with a form +document.getElementById('import-form').addEventListener('submit', async (e) => { + e.preventDefault(); + const performerName = document.getElementById('performer-name').value; + await importNewPerformer(performerName); + // Refresh the performer list + location.reload(); +}); +``` + +--- + +## Error Handling + +### Best Practices + +Always check the `success` field in the response: + +```javascript +async function safeApiCall(url, options = {}) { + try { + const response = await fetch(url, options); + + // Check HTTP status + if (!response.ok) { + throw new Error(`HTTP ${response.status}: ${response.statusText}`); + } + + const result = await response.json(); + + // Check API success field + if (!result.success) { + throw new Error(result.message); + } + + return result.data; + } catch (error) { + console.error('API Error:', error); + throw error; + } +} + +// Usage +try { + const performers = await safeApiCall('http://localhost:8080/api/search?q=jane'); + console.log('Performers:', performers); +} catch (error) { + alert(`Error: ${error.message}`); +} +``` + +### Common Error Responses + +**API Key Not Configured:** +```javascript +{ + "success": false, + "message": "TPDB_API_KEY not configured" +} +``` + +**No Results Found:** +```javascript +{ + "success": false, + "message": "No performers found" +} +``` + +**Search Failed:** +```javascript +{ + "success": false, + "message": "Search failed: connection timeout" +} +``` + +--- + +## Complete Examples + +### Example 1: Search Form with Results + +```html + + + + + + Goondex Search + + + +
+

Search Goondex

+ +
+
+ + +
+
+ +
+
+ + + + +``` + +### Example 2: Import Progress with Bootstrap + +```html + + + + + + Import Performers + + + +
+

Import All Performers

+ + + + + + +
+ + + + +``` + +--- + +## Tips for Frontend Developers + +### 1. Always Use `encodeURIComponent` for Query Parameters + +```javascript +// Good +const query = 'Jane Doe & Associates'; +fetch(`/api/search?q=${encodeURIComponent(query)}`); + +// Bad - will break with special characters +fetch(`/api/search?q=${query}`); +``` + +### 2. Handle Image Loading Errors + +```javascript +${performer.name} +``` + +### 3. Use Bootstrap Classes for Quick Styling + +```html + +
Success!
+
Error!
+
Warning!
+ + +
+ Loading... +
+ + +
+
75%
+
+``` + +### 4. Debounce Search Input + +```javascript +function debounce(func, wait) { + let timeout; + return function(...args) { + clearTimeout(timeout); + timeout = setTimeout(() => func.apply(this, args), wait); + }; +} + +// Usage +const searchInput = document.getElementById('search'); +const debouncedSearch = debounce(performSearch, 300); +searchInput.addEventListener('input', (e) => debouncedSearch(e.target.value)); +``` + +### 5. Format Dates for Display + +```javascript +function formatDate(dateString) { + if (!dateString) return 'Unknown'; + const date = new Date(dateString); + return date.toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric' + }); +} + +// Usage +console.log(formatDate('2024-01-15')); // "January 15, 2024" +``` + +### 6. Calculate Age from Birthday + +```javascript +function calculateAge(birthday) { + if (!birthday) return null; + const birthDate = new Date(birthday); + const today = new Date(); + let age = today.getFullYear() - birthDate.getFullYear(); + const monthDiff = today.getMonth() - birthDate.getMonth(); + if (monthDiff < 0 || (monthDiff === 0 && today.getDate() < birthDate.getDate())) { + age--; + } + return age; +} + +// Usage +const age = calculateAge('1995-03-15'); +console.log(`Age: ${age}`); +``` + +--- + +## Need Help? + +If you have questions about the API or need clarification: + +1. Check the data model structures above +2. Look at the complete examples +3. Test endpoints using browser DevTools Network tab +4. Consult your backend developer if you need custom endpoints + +Happy coding! diff --git a/docs/HTML_TEMPLATES_GUIDE.md b/docs/HTML_TEMPLATES_GUIDE.md new file mode 100644 index 0000000..ffb9650 --- /dev/null +++ b/docs/HTML_TEMPLATES_GUIDE.md @@ -0,0 +1,1165 @@ +# Goondex HTML/CSS Templates Guide + +**For Frontend Developers - HTML/CSS Focus** + +This guide provides complete, ready-to-use HTML templates with Bootstrap styling. All JavaScript is included as simple copy-paste snippets with detailed comments. + +## Getting Started + +All you need to know: +1. Copy the HTML template you need +2. The JavaScript is already included in ` + + + +``` + +--- + +## Performer Import Form + +**File: `import-performer.html`** + +Simple form to search and import performers from TPDB. + +```html + + + + + + Import Performer - Goondex + + + + + + + + +
+

Import Performer

+ +
+
+

+ Search for a performer by name. All matching results from TPDB will be imported. +

+ +
+
+ + +
+ + +
+ + + +
+
+
+ + + + + +``` + +--- + +## Bulk Import with Progress + +**File: `bulk-import.html`** + +Import all performers with a real-time progress bar. + +```html + + + + + + Bulk Import - Goondex + + + + + + + + +
+

Bulk Import Performers

+ +
+
+

+ Import all performers from your database. This will fetch full metadata from TPDB. +

+ + + + + + +
+
+
+ + + + + +``` + +--- + +## Simple Data Display + +**File: `performers-list.html`** + +Display a list of performers from the database. + +```html + + + + + + Performers - Goondex + + + + + + + + +
+

All Performers

+ + +
+
+ Loading... +
+

Loading performers...

+
+ + +
+
+ + + + + +``` + +--- + +## Bootstrap Component Reference + +Quick reference for Bootstrap 5 components to use in your pages. + +### Buttons + +```html + + + + + + + + + + + + + + + + + + + + +``` + +### Alerts + +```html + +
+ Success! Your action was successful. +
+ + +
+ Error! Something went wrong. +
+ + +
+ Warning! Please be careful. +
+ + +
+ Info: Here's some information. +
+ + +
+ Success message + +
+``` + +### Cards + +```html + +
+ ... +
+
Card Title
+

Card description goes here.

+ Go somewhere +
+
+ + +
+
+ Featured +
+
+
Special title
+

Card content

+
+
+``` + +### Forms + +```html + +
+
+ + +
+ +
+ + +
+ + +
+ + + + + + + + + +``` + +### Progress Bars + +```html + +
+
50%
+
+ + +
+
+
+ +
+
+
+ + +
+
+
+``` + +### Spinners + +```html + +
+ Loading... +
+ + +
+
+
+ + +
+ + +
+ Loading... +
+``` + +### Lists + +```html + + + + +
+ + Clickable item + + + Active item + +
+``` + +### Grid System + +```html + +
+
Column 1
+
Column 2
+
Column 3
+
Column 4
+
+ + +
+
Left column
+
Right column
+
+ + +
+
Sidebar
+
Main content
+
+``` + +--- + +## CSS Tips + +### Spacing Utilities + +```html + +
Margin on all sides
+
Margin top
+
Margin bottom
+
Margin left and right
+
Margin top and bottom
+ + +
Padding on all sides
+
Padding top
+
Padding bottom
+
Padding left and right
+
Padding top and bottom
+ + +``` + +### Text Utilities + +```html + +
Left aligned
+
Center aligned
+
Right aligned
+ + +

Primary text

+

Success text

+

Danger text

+

Muted text

+ + +

Bold text

+

Normal text

+

Light text

+ + +

Very large

+

Medium

+``` + +### Display Utilities + +```html + +
Hidden
+
Shown as block
+
Inline block
+
Flexbox container
+ + +
Hidden on mobile, shown on tablet+
+``` + +--- + +## Quick JavaScript Snippets + +### Show/Hide Elements + +```javascript +// Hide element +document.getElementById('myElement').style.display = 'none'; + +// Show element +document.getElementById('myElement').style.display = 'block'; + +// Toggle visibility +const el = document.getElementById('myElement'); +el.style.display = el.style.display === 'none' ? 'block' : 'none'; +``` + +### Change Text Content + +```javascript +document.getElementById('myElement').textContent = 'New text'; +document.getElementById('myElement').innerHTML = 'HTML text'; +``` + +### Add/Remove CSS Classes + +```javascript +// Add class +document.getElementById('myElement').classList.add('active'); + +// Remove class +document.getElementById('myElement').classList.remove('active'); + +// Toggle class +document.getElementById('myElement').classList.toggle('active'); +``` + +### Get Form Values + +```javascript +const inputValue = document.getElementById('myInput').value; +const selectValue = document.getElementById('mySelect').value; +``` + +--- + +## Need Help? + +All the JavaScript in these templates is ready to use - just copy and paste! If you need to customize something: + +1. Look for comments in the JavaScript (`// like this`) +2. The `API_BASE` constant at the top controls the server URL +3. Use AI assistance to modify the JavaScript parts +4. Focus on the HTML/CSS which you're already great at! + +The templates are fully functional and follow Bootstrap best practices. diff --git a/docs/INDEX.md b/docs/INDEX.md index 0ef49d7..7692746 100644 --- a/docs/INDEX.md +++ b/docs/INDEX.md @@ -18,6 +18,7 @@ Goondex is a fast, local-first media indexer for adult content. It ingests metad - [Architecture Overview](ARCHITECTURE.md) - System design and components - [Database Schema](DATABASE_SCHEMA.md) - SQLite database structure - [Data Models](DATA_MODELS.md) - Internal data structures +- [Color Scheme](COLOR_SCHEME.md) - UI color palette and branding guidelines ### Integration - [TPDB Integration](TPDB_INTEGRATION.md) - ThePornDB API integration guide diff --git a/go.mod b/go.mod index 8cd0511..1df860c 100644 --- a/go.mod +++ b/go.mod @@ -3,12 +3,16 @@ module git.leaktechnologies.dev/stu/Goondex go 1.25.4 require ( + github.com/antchfx/htmlquery v1.3.5 github.com/spf13/cobra v1.10.1 + golang.org/x/net v0.47.0 modernc.org/sqlite v1.40.0 ) require ( + github.com/antchfx/xpath v1.3.5 // indirect github.com/dustin/go-humanize v1.0.1 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/uuid v1.6.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect @@ -16,7 +20,8 @@ require ( github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/spf13/pflag v1.0.9 // indirect golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect - golang.org/x/sys v0.36.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/text v0.31.0 // indirect modernc.org/libc v1.66.10 // indirect modernc.org/mathutil v1.7.1 // indirect modernc.org/memory v1.11.0 // indirect diff --git a/go.sum b/go.sum index d733d53..f5f4023 100644 --- a/go.sum +++ b/go.sum @@ -1,6 +1,13 @@ +github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0= +github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA= +github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ= +github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs= github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= @@ -18,17 +25,85 @@ github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY= github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o= golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= -golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= -golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= -golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= -golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= -golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg= -golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.26.5 h1:xM3bX7Mve6G8K8b+T11ReenJOT+BmVqQj0FY5T4+5Y4= diff --git a/internal/db/db.go b/internal/db/db.go index ca0c1d4..cb0a007 100644 --- a/internal/db/db.go +++ b/internal/db/db.go @@ -37,7 +37,30 @@ func Open(dbPath string) (*DB, error) { return nil, fmt.Errorf("failed to initialize schema: %w", err) } - return &DB{conn: conn}, nil + db := &DB{conn: conn} + + // Seed tag categories and common tags + if err := db.seedDatabase(); err != nil { + conn.Close() + return nil, fmt.Errorf("failed to seed database: %w", err) + } + + return db, nil +} + +// seedDatabase populates tag categories and common tags +func (db *DB) seedDatabase() error { + // Seed tag categories + if _, err := db.conn.Exec(SeedTagCategories); err != nil { + return fmt.Errorf("failed to seed tag categories: %w", err) + } + + // Seed common tags + if _, err := db.conn.Exec(SeedCommonTags); err != nil { + return fmt.Errorf("failed to seed common tags: %w", err) + } + + return nil } // Close closes the database connection diff --git a/internal/db/movie_store.go b/internal/db/movie_store.go new file mode 100644 index 0000000..9a1e181 --- /dev/null +++ b/internal/db/movie_store.go @@ -0,0 +1,211 @@ +package db + +import ( + "database/sql" + "fmt" + "time" + + "git.leaktechnologies.dev/stu/Goondex/internal/model" +) + +// MovieStore provides database operations for movies +type MovieStore struct { + db *DB +} + +// NewMovieStore creates a new MovieStore +func NewMovieStore(db *DB) *MovieStore { + return &MovieStore{db: db} +} + +// Create inserts a new movie into the database +func (s *MovieStore) Create(movie *model.Movie) error { + now := time.Now() + movie.CreatedAt = now + movie.UpdatedAt = now + + result, err := s.db.conn.Exec(` + INSERT INTO movies ( + title, date, studio_id, description, director, duration, + image_path, image_url, back_image_url, url, source, source_id, + created_at, updated_at + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + movie.Title, movie.Date, movie.StudioID, movie.Description, movie.Director, movie.Duration, + movie.ImagePath, movie.ImageURL, movie.BackImageURL, movie.URL, movie.Source, movie.SourceID, + now, now, + ) + if err != nil { + return fmt.Errorf("failed to create movie: %w", err) + } + + id, err := result.LastInsertId() + if err != nil { + return fmt.Errorf("failed to get movie ID: %w", err) + } + + movie.ID = id + return nil +} + +// GetByID retrieves a movie by its ID +func (s *MovieStore) GetByID(id int64) (*model.Movie, error) { + var movie model.Movie + var studioID sql.NullInt64 + + err := s.db.conn.QueryRow(` + SELECT id, title, COALESCE(date, ''), COALESCE(studio_id, 0), COALESCE(description, ''), + COALESCE(director, ''), COALESCE(duration, 0), + COALESCE(image_path, ''), COALESCE(image_url, ''), COALESCE(back_image_url, ''), + COALESCE(url, ''), COALESCE(source, ''), COALESCE(source_id, ''), + created_at, updated_at + FROM movies + WHERE id = ? + `, id).Scan( + &movie.ID, &movie.Title, &movie.Date, &studioID, &movie.Description, + &movie.Director, &movie.Duration, + &movie.ImagePath, &movie.ImageURL, &movie.BackImageURL, + &movie.URL, &movie.Source, &movie.SourceID, + &movie.CreatedAt, &movie.UpdatedAt, + ) + + if err == sql.ErrNoRows { + return nil, fmt.Errorf("movie not found") + } + if err != nil { + return nil, fmt.Errorf("failed to get movie: %w", err) + } + + if studioID.Valid && studioID.Int64 > 0 { + movie.StudioID = &studioID.Int64 + } + + return &movie, nil +} + +// Search searches for movies by title +func (s *MovieStore) Search(query string) ([]model.Movie, error) { + rows, err := s.db.conn.Query(` + SELECT id, title, COALESCE(date, ''), COALESCE(studio_id, 0), COALESCE(description, ''), + COALESCE(director, ''), COALESCE(duration, 0), + COALESCE(image_path, ''), COALESCE(image_url, ''), COALESCE(back_image_url, ''), + COALESCE(url, ''), COALESCE(source, ''), COALESCE(source_id, ''), + created_at, updated_at + FROM movies + WHERE title LIKE ? + ORDER BY date DESC, title ASC + LIMIT 100 + `, "%"+query+"%") + if err != nil { + return nil, fmt.Errorf("failed to search movies: %w", err) + } + defer rows.Close() + + var movies []model.Movie + for rows.Next() { + var movie model.Movie + var studioID sql.NullInt64 + + if err := rows.Scan( + &movie.ID, &movie.Title, &movie.Date, &studioID, &movie.Description, + &movie.Director, &movie.Duration, + &movie.ImagePath, &movie.ImageURL, &movie.BackImageURL, + &movie.URL, &movie.Source, &movie.SourceID, + &movie.CreatedAt, &movie.UpdatedAt, + ); err != nil { + return nil, fmt.Errorf("failed to scan movie: %w", err) + } + + if studioID.Valid && studioID.Int64 > 0 { + movie.StudioID = &studioID.Int64 + } + + movies = append(movies, movie) + } + + return movies, nil +} + +// AddScene links a scene to a movie +func (s *MovieStore) AddScene(movieID, sceneID int64, sceneNumber int) error { + _, err := s.db.conn.Exec(` + INSERT OR IGNORE INTO movie_scenes (movie_id, scene_id, scene_number) + VALUES (?, ?, ?) + `, movieID, sceneID, sceneNumber) + return err +} + +// GetScenes returns all scenes for a movie +func (s *MovieStore) GetScenes(movieID int64) ([]model.Scene, error) { + rows, err := s.db.conn.Query(` + SELECT s.id, s.title, COALESCE(s.code, ''), COALESCE(s.date, ''), COALESCE(s.studio_id, 0), + COALESCE(s.description, ''), COALESCE(s.image_path, ''), COALESCE(s.image_url, ''), + COALESCE(s.director, ''), COALESCE(s.url, ''), COALESCE(s.source, ''), COALESCE(s.source_id, ''), + s.created_at, s.updated_at, COALESCE(ms.scene_number, 0) + FROM scenes s + INNER JOIN movie_scenes ms ON s.id = ms.scene_id + WHERE ms.movie_id = ? + ORDER BY ms.scene_number ASC, s.title ASC + `, movieID) + if err != nil { + return nil, fmt.Errorf("failed to get scenes: %w", err) + } + defer rows.Close() + + var scenes []model.Scene + for rows.Next() { + var scene model.Scene + var studioID sql.NullInt64 + var sceneNumber int + + if err := rows.Scan( + &scene.ID, &scene.Title, &scene.Code, &scene.Date, &studioID, + &scene.Description, &scene.ImagePath, &scene.ImageURL, + &scene.Director, &scene.URL, &scene.Source, &scene.SourceID, + &scene.CreatedAt, &scene.UpdatedAt, &sceneNumber, + ); err != nil { + return nil, fmt.Errorf("failed to scan scene: %w", err) + } + + if studioID.Valid && studioID.Int64 > 0 { + scene.StudioID = &studioID.Int64 + } + + scenes = append(scenes, scene) + } + + return scenes, nil +} + +// GetSceneCount returns the number of scenes in a movie +func (s *MovieStore) GetSceneCount(movieID int64) (int, error) { + var count int + err := s.db.conn.QueryRow(` + SELECT COUNT(*) FROM movie_scenes WHERE movie_id = ? + `, movieID).Scan(&count) + return count, err +} + +// Update updates an existing movie +func (s *MovieStore) Update(movie *model.Movie) error { + movie.UpdatedAt = time.Now() + + _, err := s.db.conn.Exec(` + UPDATE movies SET + title = ?, date = ?, studio_id = ?, description = ?, director = ?, duration = ?, + image_path = ?, image_url = ?, back_image_url = ?, url = ?, source = ?, source_id = ?, + updated_at = ? + WHERE id = ? + `, + movie.Title, movie.Date, movie.StudioID, movie.Description, movie.Director, movie.Duration, + movie.ImagePath, movie.ImageURL, movie.BackImageURL, movie.URL, movie.Source, movie.SourceID, + movie.UpdatedAt, movie.ID, + ) + return err +} + +// Delete removes a movie from the database +func (s *MovieStore) Delete(id int64) error { + _, err := s.db.conn.Exec("DELETE FROM movies WHERE id = ?", id) + return err +} diff --git a/internal/db/performer_store.go b/internal/db/performer_store.go index 2662dc8..b26b62b 100644 --- a/internal/db/performer_store.go +++ b/internal/db/performer_store.go @@ -104,13 +104,13 @@ func (s *PerformerStore) GetByID(id int64) (*model.Performer, error) { err := s.db.conn.QueryRow(` SELECT - id, name, aliases, - gender, birthday, astrology, birthplace, ethnicity, nationality, country, - eye_color, hair_color, height, weight, measurements, cup_size, - tattoo_description, piercing_description, boob_job, - career, career_start_year, career_end_year, date_of_death, active, - image_path, image_url, poster_url, bio, - source, source_id, source_numeric_id, + id, name, COALESCE(aliases, ''), + COALESCE(gender, ''), COALESCE(birthday, ''), COALESCE(astrology, ''), COALESCE(birthplace, ''), COALESCE(ethnicity, ''), COALESCE(nationality, ''), COALESCE(country, ''), + COALESCE(eye_color, ''), COALESCE(hair_color, ''), COALESCE(height, 0), COALESCE(weight, 0), COALESCE(measurements, ''), COALESCE(cup_size, ''), + COALESCE(tattoo_description, ''), COALESCE(piercing_description, ''), COALESCE(boob_job, ''), + COALESCE(career, ''), COALESCE(career_start_year, 0), COALESCE(career_end_year, 0), COALESCE(date_of_death, ''), COALESCE(active, 0), + COALESCE(image_path, ''), COALESCE(image_url, ''), COALESCE(poster_url, ''), COALESCE(bio, ''), + COALESCE(source, ''), COALESCE(source_id, ''), COALESCE(source_numeric_id, 0), created_at, updated_at FROM performers WHERE id = ? `, id).Scan( @@ -138,21 +138,23 @@ func (s *PerformerStore) GetByID(id int64) (*model.Performer, error) { return p, nil } -// Search searches for performers by name +// Search searches for performers by name, ordered by popularity (scene count) func (s *PerformerStore) Search(query string) ([]model.Performer, error) { rows, err := s.db.conn.Query(` SELECT - id, name, aliases, - gender, birthday, astrology, birthplace, ethnicity, nationality, country, - eye_color, hair_color, height, weight, measurements, cup_size, - tattoo_description, piercing_description, boob_job, - career, career_start_year, career_end_year, date_of_death, active, - image_path, image_url, poster_url, bio, - source, source_id, source_numeric_id, - created_at, updated_at - FROM performers - WHERE name LIKE ? OR aliases LIKE ? - ORDER BY name + p.id, p.name, COALESCE(p.aliases, ''), + COALESCE(p.gender, ''), COALESCE(p.birthday, ''), COALESCE(p.astrology, ''), COALESCE(p.birthplace, ''), COALESCE(p.ethnicity, ''), COALESCE(p.nationality, ''), COALESCE(p.country, ''), + COALESCE(p.eye_color, ''), COALESCE(p.hair_color, ''), COALESCE(p.height, 0), COALESCE(p.weight, 0), COALESCE(p.measurements, ''), COALESCE(p.cup_size, ''), + COALESCE(p.tattoo_description, ''), COALESCE(p.piercing_description, ''), COALESCE(p.boob_job, ''), + COALESCE(p.career, ''), COALESCE(p.career_start_year, 0), COALESCE(p.career_end_year, 0), COALESCE(p.date_of_death, ''), COALESCE(p.active, 0), + COALESCE(p.image_path, ''), COALESCE(p.image_url, ''), COALESCE(p.poster_url, ''), COALESCE(p.bio, ''), + COALESCE(p.source, ''), COALESCE(p.source_id, ''), COALESCE(p.source_numeric_id, 0), + p.created_at, p.updated_at + FROM performers p + LEFT JOIN scene_performers sp ON p.id = sp.performer_id + WHERE p.name LIKE ? OR COALESCE(p.aliases, '') LIKE ? + GROUP BY p.id + ORDER BY COUNT(sp.scene_id) DESC, p.name ASC `, "%"+query+"%", "%"+query+"%") if err != nil { @@ -190,6 +192,20 @@ func (s *PerformerStore) Search(query string) ([]model.Performer, error) { return performers, nil } +// GetSceneCount returns the number of scenes associated with a performer +func (s *PerformerStore) GetSceneCount(performerID int64) (int, error) { + var count int + err := s.db.conn.QueryRow(` + SELECT COUNT(*) FROM scene_performers WHERE performer_id = ? + `, performerID).Scan(&count) + + if err != nil { + return 0, fmt.Errorf("failed to count scenes: %w", err) + } + + return count, nil +} + // Update updates an existing performer func (s *PerformerStore) Update(p *model.Performer) error { p.UpdatedAt = time.Now() @@ -234,3 +250,57 @@ func (s *PerformerStore) Delete(id int64) error { return nil } + +// Upsert inserts or updates a performer based on source_id +func (s *PerformerStore) Upsert(p *model.Performer) error { + // Try to find existing performer by source_id + existing, err := s.GetBySourceID(p.Source, p.SourceID) + if err == nil && existing != nil { + // Update existing + p.ID = existing.ID + return s.Update(p) + } + // Create new + return s.Create(p) +} + +// GetBySourceID retrieves a performer by its source and source_id +func (s *PerformerStore) GetBySourceID(source, sourceID string) (*model.Performer, error) { + var p model.Performer + var activeInt int + + err := s.db.conn.QueryRow(` + SELECT id, name, COALESCE(aliases, ''), + COALESCE(gender, ''), COALESCE(birthday, ''), COALESCE(astrology, ''), + COALESCE(birthplace, ''), COALESCE(ethnicity, ''), COALESCE(nationality, ''), COALESCE(country, ''), + COALESCE(eye_color, ''), COALESCE(hair_color, ''), COALESCE(height, 0), COALESCE(weight, 0), + COALESCE(measurements, ''), COALESCE(cup_size, ''), COALESCE(tattoo_description, ''), COALESCE(piercing_description, ''), COALESCE(boob_job, ''), + COALESCE(career, ''), COALESCE(career_start_year, 0), COALESCE(career_end_year, 0), COALESCE(date_of_death, ''), COALESCE(active, 0), + COALESCE(image_path, ''), COALESCE(image_url, ''), COALESCE(poster_url, ''), COALESCE(bio, ''), + COALESCE(source, ''), COALESCE(source_id, ''), COALESCE(source_numeric_id, 0), + created_at, updated_at + FROM performers + WHERE source = ? AND source_id = ? + `, source, sourceID).Scan( + &p.ID, &p.Name, &p.Aliases, + &p.Gender, &p.Birthday, &p.Astrology, + &p.Birthplace, &p.Ethnicity, &p.Nationality, &p.Country, + &p.EyeColor, &p.HairColor, &p.Height, &p.Weight, + &p.Measurements, &p.CupSize, &p.TattooDescription, &p.PiercingDescription, &p.BoobJob, + &p.Career, &p.CareerStartYear, &p.CareerEndYear, &p.DateOfDeath, &activeInt, + &p.ImagePath, &p.ImageURL, &p.PosterURL, &p.Bio, + &p.Source, &p.SourceID, &p.SourceNumericID, + &p.CreatedAt, &p.UpdatedAt, + ) + + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("failed to get performer: %w", err) + } + + p.Active = activeInt == 1 + + return &p, nil +} diff --git a/internal/db/scene_store.go b/internal/db/scene_store.go index c83bfa8..22d14ed 100644 --- a/internal/db/scene_store.go +++ b/internal/db/scene_store.go @@ -48,7 +48,7 @@ func (s *SceneStore) GetByID(id int64) (*model.Scene, error) { var createdAt, updatedAt string err := s.db.conn.QueryRow(` - SELECT id, title, code, date, studio_id, description, image_path, image_url, director, url, source, source_id, created_at, updated_at + SELECT id, title, COALESCE(code, ''), COALESCE(date, ''), COALESCE(studio_id, 0), COALESCE(description, ''), COALESCE(image_path, ''), COALESCE(image_url, ''), COALESCE(director, ''), COALESCE(url, ''), COALESCE(source, ''), COALESCE(source_id, ''), created_at, updated_at FROM scenes WHERE id = ? `, id).Scan(&scene.ID, &scene.Title, &scene.Code, &scene.Date, &scene.StudioID, &scene.Description, &scene.ImagePath, &scene.ImageURL, &scene.Director, &scene.URL, &scene.Source, &scene.SourceID, &createdAt, &updatedAt) @@ -68,9 +68,9 @@ func (s *SceneStore) GetByID(id int64) (*model.Scene, error) { // Search searches for scenes by title or code func (s *SceneStore) Search(query string) ([]model.Scene, error) { rows, err := s.db.conn.Query(` - SELECT id, title, code, date, studio_id, description, image_path, image_url, director, url, source, source_id, created_at, updated_at + SELECT id, title, COALESCE(code, ''), COALESCE(date, ''), COALESCE(studio_id, 0), COALESCE(description, ''), COALESCE(image_path, ''), COALESCE(image_url, ''), COALESCE(director, ''), COALESCE(url, ''), COALESCE(source, ''), COALESCE(source_id, ''), created_at, updated_at FROM scenes - WHERE title LIKE ? OR code LIKE ? + WHERE title LIKE ? OR COALESCE(code, '') LIKE ? ORDER BY date DESC, title `, "%"+query+"%", "%"+query+"%") @@ -173,10 +173,20 @@ func (s *SceneStore) RemovePerformer(sceneID, performerID int64) error { // AddTag associates a tag with a scene func (s *SceneStore) AddTag(sceneID, tagID int64) error { + return s.AddTagWithConfidence(sceneID, tagID, 1.0, "user", false) +} + +// AddTagWithConfidence associates a tag with a scene with ML support +func (s *SceneStore) AddTagWithConfidence(sceneID, tagID int64, confidence float64, source string, verified bool) error { + verifiedInt := 0 + if verified { + verifiedInt = 1 + } + _, err := s.db.conn.Exec(` - INSERT OR IGNORE INTO scene_tags (scene_id, tag_id) - VALUES (?, ?) - `, sceneID, tagID) + INSERT OR REPLACE INTO scene_tags (scene_id, tag_id, confidence, source, verified, created_at) + VALUES (?, ?, ?, ?, ?, datetime('now')) + `, sceneID, tagID, confidence, source, verifiedInt) if err != nil { return fmt.Errorf("failed to add tag to scene: %w", err) @@ -185,6 +195,21 @@ func (s *SceneStore) AddTag(sceneID, tagID int64) error { return nil } +// VerifyTag marks a scene tag as human-verified +func (s *SceneStore) VerifyTag(sceneID, tagID int64) error { + _, err := s.db.conn.Exec(` + UPDATE scene_tags + SET verified = 1 + WHERE scene_id = ? AND tag_id = ? + `, sceneID, tagID) + + if err != nil { + return fmt.Errorf("failed to verify tag: %w", err) + } + + return nil +} + // RemoveTag removes a tag association from a scene func (s *SceneStore) RemoveTag(sceneID, tagID int64) error { _, err := s.db.conn.Exec(` @@ -198,3 +223,216 @@ func (s *SceneStore) RemoveTag(sceneID, tagID int64) error { return nil } + +// GetPerformers retrieves all performers for a scene +func (s *SceneStore) GetPerformers(sceneID int64) ([]model.Performer, error) { + rows, err := s.db.conn.Query(` + SELECT p.id, p.name, COALESCE(p.aliases, ''), COALESCE(p.gender, ''), + COALESCE(p.birthday, ''), COALESCE(p.nationality, ''), + COALESCE(p.source, ''), COALESCE(p.source_id, ''), + p.created_at, p.updated_at + FROM performers p + INNER JOIN scene_performers sp ON p.id = sp.performer_id + WHERE sp.scene_id = ? + ORDER BY p.name + `, sceneID) + + if err != nil { + return nil, fmt.Errorf("failed to get performers: %w", err) + } + defer rows.Close() + + var performers []model.Performer + for rows.Next() { + var p model.Performer + var createdAt, updatedAt string + + err := rows.Scan(&p.ID, &p.Name, &p.Aliases, &p.Gender, + &p.Birthday, &p.Nationality, &p.Source, &p.SourceID, + &createdAt, &updatedAt) + if err != nil { + return nil, fmt.Errorf("failed to scan performer: %w", err) + } + + p.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + p.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + + performers = append(performers, p) + } + + return performers, nil +} + +// GetTags retrieves all tags for a scene +func (s *SceneStore) GetTags(sceneID int64) ([]model.Tag, error) { + rows, err := s.db.conn.Query(` + SELECT t.id, t.name, t.category_id, COALESCE(t.description, ''), + COALESCE(t.source, ''), COALESCE(t.source_id, ''), + t.created_at, t.updated_at + FROM tags t + INNER JOIN scene_tags st ON t.id = st.tag_id + WHERE st.scene_id = ? + ORDER BY t.name + `, sceneID) + + if err != nil { + return nil, fmt.Errorf("failed to get tags: %w", err) + } + defer rows.Close() + + var tags []model.Tag + for rows.Next() { + var t model.Tag + var createdAt, updatedAt string + + err := rows.Scan(&t.ID, &t.Name, &t.CategoryID, &t.Description, + &t.Source, &t.SourceID, &createdAt, &updatedAt) + if err != nil { + return nil, fmt.Errorf("failed to scan tag: %w", err) + } + + t.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + t.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + + tags = append(tags, t) + } + + return tags, nil +} + +// Upsert inserts or updates a scene based on source_id +func (s *SceneStore) Upsert(scene *model.Scene) error { + // Try to find existing scene by source_id + existing, err := s.GetBySourceID(scene.Source, scene.SourceID) + if err == nil && existing != nil { + // Update existing + scene.ID = existing.ID + return s.Update(scene) + } + // Create new + return s.Create(scene) +} + +// GetBySourceID retrieves a scene by its source and source_id +func (s *SceneStore) GetBySourceID(source, sourceID string) (*model.Scene, error) { + var scene model.Scene + var createdAt, updatedAt string + + err := s.db.conn.QueryRow(` + SELECT id, title, COALESCE(code, ''), COALESCE(date, ''), COALESCE(studio_id, 0), + COALESCE(description, ''), COALESCE(image_path, ''), COALESCE(image_url, ''), + COALESCE(director, ''), COALESCE(url, ''), COALESCE(source, ''), COALESCE(source_id, ''), + created_at, updated_at + FROM scenes + WHERE source = ? AND source_id = ? + `, source, sourceID).Scan( + &scene.ID, &scene.Title, &scene.Code, &scene.Date, &scene.StudioID, + &scene.Description, &scene.ImagePath, &scene.ImageURL, + &scene.Director, &scene.URL, &scene.Source, &scene.SourceID, + &createdAt, &updatedAt, + ) + + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("failed to get scene: %w", err) + } + + scene.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + scene.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + + return &scene, nil +} + +// GetByPerformer retrieves all scenes featuring a specific performer +func (s *SceneStore) GetByPerformer(performerID int64) ([]model.Scene, error) { + rows, err := s.db.conn.Query(` + SELECT DISTINCT s.id, s.title, COALESCE(s.code, ''), COALESCE(s.date, ''), COALESCE(s.studio_id, 0), + COALESCE(s.description, ''), COALESCE(s.image_path, ''), COALESCE(s.image_url, ''), + COALESCE(s.director, ''), COALESCE(s.url, ''), COALESCE(s.source, ''), COALESCE(s.source_id, ''), + s.created_at, s.updated_at + FROM scenes s + INNER JOIN scene_performers sp ON s.id = sp.scene_id + WHERE sp.performer_id = ? + ORDER BY s.date DESC, s.title ASC + `, performerID) + + if err != nil { + return nil, fmt.Errorf("failed to get scenes for performer: %w", err) + } + defer rows.Close() + + var scenes []model.Scene + for rows.Next() { + var scene model.Scene + var createdAt, updatedAt string + + err := rows.Scan( + &scene.ID, &scene.Title, &scene.Code, &scene.Date, &scene.StudioID, + &scene.Description, &scene.ImagePath, &scene.ImageURL, + &scene.Director, &scene.URL, &scene.Source, &scene.SourceID, + &createdAt, &updatedAt, + ) + if err != nil { + return nil, fmt.Errorf("failed to scan scene: %w", err) + } + + scene.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + scene.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + + scenes = append(scenes, scene) + } + + return scenes, nil +} + +// GetMovies retrieves all movies that contain this scene +func (s *SceneStore) GetMovies(sceneID int64) ([]model.Movie, error) { + rows, err := s.db.conn.Query(` + SELECT m.id, m.title, COALESCE(m.date, ''), COALESCE(m.studio_id, 0), + COALESCE(m.description, ''), COALESCE(m.director, ''), COALESCE(m.duration, 0), + COALESCE(m.image_path, ''), COALESCE(m.image_url, ''), COALESCE(m.back_image_url, ''), + COALESCE(m.url, ''), COALESCE(m.source, ''), COALESCE(m.source_id, ''), + m.created_at, m.updated_at, COALESCE(ms.scene_number, 0) + FROM movies m + INNER JOIN movie_scenes ms ON m.id = ms.movie_id + WHERE ms.scene_id = ? + ORDER BY m.date DESC, m.title ASC + `, sceneID) + + if err != nil { + return nil, fmt.Errorf("failed to get movies for scene: %w", err) + } + defer rows.Close() + + var movies []model.Movie + for rows.Next() { + var m model.Movie + var studioID sql.NullInt64 + var createdAt, updatedAt string + var sceneNumber int + + err := rows.Scan( + &m.ID, &m.Title, &m.Date, &studioID, + &m.Description, &m.Director, &m.Duration, + &m.ImagePath, &m.ImageURL, &m.BackImageURL, + &m.URL, &m.Source, &m.SourceID, + &createdAt, &updatedAt, &sceneNumber, + ) + if err != nil { + return nil, fmt.Errorf("failed to scan movie: %w", err) + } + + if studioID.Valid && studioID.Int64 > 0 { + m.StudioID = &studioID.Int64 + } + + m.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + m.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + + movies = append(movies, m) + } + + return movies, nil +} diff --git a/internal/db/schema.go b/internal/db/schema.go index 02f89af..26b5855 100644 --- a/internal/db/schema.go +++ b/internal/db/schema.go @@ -68,14 +68,29 @@ CREATE TABLE IF NOT EXISTS studios ( FOREIGN KEY (parent_id) REFERENCES studios(id) ON DELETE SET NULL ); --- Tags table -CREATE TABLE IF NOT EXISTS tags ( +-- Tag Categories table (hierarchical) +CREATE TABLE IF NOT EXISTS tag_categories ( id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT NOT NULL UNIQUE, + parent_id INTEGER, + description TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY (parent_id) REFERENCES tag_categories(id) ON DELETE CASCADE +); + +-- Tags table (enhanced with categories) +CREATE TABLE IF NOT EXISTS tags ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + category_id INTEGER NOT NULL, + aliases TEXT, + description TEXT, source TEXT, source_id TEXT, created_at TEXT NOT NULL DEFAULT (datetime('now')), - updated_at TEXT NOT NULL DEFAULT (datetime('now')) + updated_at TEXT NOT NULL DEFAULT (datetime('now')), + UNIQUE(category_id, name), + FOREIGN KEY (category_id) REFERENCES tag_categories(id) ON DELETE CASCADE ); -- Scenes table @@ -97,6 +112,54 @@ CREATE TABLE IF NOT EXISTS scenes ( FOREIGN KEY (studio_id) REFERENCES studios(id) ON DELETE SET NULL ); +-- Movies table (full-length DVDs/releases) +CREATE TABLE IF NOT EXISTS movies ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + title TEXT NOT NULL, + date TEXT, + studio_id INTEGER, + description TEXT, + director TEXT, + duration INTEGER, + image_path TEXT, + image_url TEXT, + back_image_url TEXT, + url TEXT, + source TEXT, + source_id TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + updated_at TEXT NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY (studio_id) REFERENCES studios(id) ON DELETE SET NULL +); + +-- Movie-Scene many-to-many junction table (scenes belong to movies) +CREATE TABLE IF NOT EXISTS movie_scenes ( + movie_id INTEGER NOT NULL, + scene_id INTEGER NOT NULL, + scene_number INTEGER, + PRIMARY KEY (movie_id, scene_id), + FOREIGN KEY (movie_id) REFERENCES movies(id) ON DELETE CASCADE, + FOREIGN KEY (scene_id) REFERENCES scenes(id) ON DELETE CASCADE +); + +-- Movie-Performer many-to-many junction table +CREATE TABLE IF NOT EXISTS movie_performers ( + movie_id INTEGER NOT NULL, + performer_id INTEGER NOT NULL, + PRIMARY KEY (movie_id, performer_id), + FOREIGN KEY (movie_id) REFERENCES movies(id) ON DELETE CASCADE, + FOREIGN KEY (performer_id) REFERENCES performers(id) ON DELETE CASCADE +); + +-- Movie-Tag many-to-many junction table +CREATE TABLE IF NOT EXISTS movie_tags ( + movie_id INTEGER NOT NULL, + tag_id INTEGER NOT NULL, + PRIMARY KEY (movie_id, tag_id), + FOREIGN KEY (movie_id) REFERENCES movies(id) ON DELETE CASCADE, + FOREIGN KEY (tag_id) REFERENCES tags(id) ON DELETE CASCADE +); + -- Scene-Performer many-to-many junction table CREATE TABLE IF NOT EXISTS scene_performers ( scene_id INTEGER NOT NULL, @@ -106,19 +169,92 @@ CREATE TABLE IF NOT EXISTS scene_performers ( FOREIGN KEY (performer_id) REFERENCES performers(id) ON DELETE CASCADE ); --- Scene-Tag many-to-many junction table +-- Scene-Tag many-to-many junction table (enhanced with ML support) CREATE TABLE IF NOT EXISTS scene_tags ( scene_id INTEGER NOT NULL, tag_id INTEGER NOT NULL, + confidence REAL DEFAULT 1.0, + source TEXT NOT NULL DEFAULT 'user', + verified INTEGER DEFAULT 0, + created_at TEXT NOT NULL DEFAULT (datetime('now')), PRIMARY KEY (scene_id, tag_id), FOREIGN KEY (scene_id) REFERENCES scenes(id) ON DELETE CASCADE, FOREIGN KEY (tag_id) REFERENCES tags(id) ON DELETE CASCADE ); +-- Scene Images table (for ML training and PornPics integration) +CREATE TABLE IF NOT EXISTS scene_images ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + scene_id INTEGER NOT NULL, + image_url TEXT NOT NULL, + image_path TEXT, + source TEXT, + source_id TEXT, + width INTEGER, + height INTEGER, + file_size INTEGER, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY (scene_id) REFERENCES scenes(id) ON DELETE CASCADE +); + +-- ML Predictions table (track model versions and predictions) +CREATE TABLE IF NOT EXISTS ml_predictions ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + scene_id INTEGER, + image_id INTEGER, + model_version TEXT NOT NULL, + predictions TEXT NOT NULL, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY (scene_id) REFERENCES scenes(id) ON DELETE CASCADE, + FOREIGN KEY (image_id) REFERENCES scene_images(id) ON DELETE CASCADE +); + -- Indexes for common queries (v0.1.0) CREATE INDEX IF NOT EXISTS idx_performers_name ON performers(name); CREATE INDEX IF NOT EXISTS idx_studios_name ON studios(name); CREATE INDEX IF NOT EXISTS idx_scenes_title ON scenes(title); CREATE INDEX IF NOT EXISTS idx_scenes_code ON scenes(code); +CREATE INDEX IF NOT EXISTS idx_scenes_date ON scenes(date); +CREATE INDEX IF NOT EXISTS idx_movies_title ON movies(title); +CREATE INDEX IF NOT EXISTS idx_movies_date ON movies(date); +CREATE INDEX IF NOT EXISTS idx_movie_scenes_movie ON movie_scenes(movie_id); +CREATE INDEX IF NOT EXISTS idx_movie_scenes_scene ON movie_scenes(scene_id); + +-- Tag search indexes (v0.2.0 - ML ready) +CREATE INDEX IF NOT EXISTS idx_tag_categories_name ON tag_categories(name); +CREATE INDEX IF NOT EXISTS idx_tag_categories_parent ON tag_categories(parent_id); CREATE INDEX IF NOT EXISTS idx_tags_name ON tags(name); +CREATE INDEX IF NOT EXISTS idx_tags_category ON tags(category_id); + +-- Scene tag filtering indexes (critical for complex queries) +CREATE INDEX IF NOT EXISTS idx_scene_tags_tag ON scene_tags(tag_id); +CREATE INDEX IF NOT EXISTS idx_scene_tags_scene ON scene_tags(scene_id); +CREATE INDEX IF NOT EXISTS idx_scene_tags_confidence ON scene_tags(confidence); +CREATE INDEX IF NOT EXISTS idx_scene_tags_verified ON scene_tags(verified); +CREATE INDEX IF NOT EXISTS idx_scene_tags_source ON scene_tags(source); + +-- Image processing indexes +CREATE INDEX IF NOT EXISTS idx_scene_images_scene ON scene_images(scene_id); +CREATE INDEX IF NOT EXISTS idx_scene_images_source ON scene_images(source, source_id); + +-- ML prediction indexes +CREATE INDEX IF NOT EXISTS idx_ml_predictions_scene ON ml_predictions(scene_id); +CREATE INDEX IF NOT EXISTS idx_ml_predictions_image ON ml_predictions(image_id); +CREATE INDEX IF NOT EXISTS idx_ml_predictions_model ON ml_predictions(model_version); + +-- Sync metadata table (track last sync times) +CREATE TABLE IF NOT EXISTS sync_metadata ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + entity_type TEXT NOT NULL UNIQUE, + last_sync_at TEXT NOT NULL, + records_updated INTEGER DEFAULT 0, + records_failed INTEGER DEFAULT 0, + status TEXT NOT NULL DEFAULT 'completed', + error_message TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + updated_at TEXT NOT NULL DEFAULT (datetime('now')) +); + +CREATE INDEX IF NOT EXISTS idx_sync_metadata_entity ON sync_metadata(entity_type); +CREATE INDEX IF NOT EXISTS idx_sync_metadata_last_sync ON sync_metadata(last_sync_at); ` diff --git a/internal/db/seed_categories.go b/internal/db/seed_categories.go new file mode 100644 index 0000000..cdcbb3c --- /dev/null +++ b/internal/db/seed_categories.go @@ -0,0 +1,140 @@ +package db + +// SeedTagCategories contains SQL to populate initial tag categories +const SeedTagCategories = ` +-- Root categories +INSERT OR IGNORE INTO tag_categories (id, name, parent_id, description) VALUES +(1, 'general', NULL, 'General uncategorized tags'), +(2, 'people', NULL, 'People-related attributes'), +(3, 'clothing', NULL, 'Clothing and wardrobe'), +(4, 'position', NULL, 'Positions and poses'), +(5, 'action', NULL, 'Actions and activities'), +(6, 'setting', NULL, 'Location and environment'), +(7, 'production', NULL, 'Production quality and style'); + +-- People subcategories +INSERT OR IGNORE INTO tag_categories (name, parent_id, description) VALUES +('people/count', 2, 'Number of people in scene'), +('people/ethnicity', 2, 'Ethnic background'), +('people/age_category', 2, 'Age category (teen, milf, mature, etc)'), +('people/body_type', 2, 'Body type and build'), +('people/hair', 2, 'Hair attributes'), +('people/hair/color', (SELECT id FROM tag_categories WHERE name = 'people/hair'), 'Hair color'), +('people/hair/length', (SELECT id FROM tag_categories WHERE name = 'people/hair'), 'Hair length'), +('people/eyes', 2, 'Eye attributes'), +('people/eyes/color', (SELECT id FROM tag_categories WHERE name = 'people/eyes'), 'Eye color'); + +-- Clothing subcategories +INSERT OR IGNORE INTO tag_categories (name, parent_id, description) VALUES +('clothing/type', 3, 'Type of clothing (lingerie, uniform, etc)'), +('clothing/color', 3, 'Clothing color'), +('clothing/specific', 3, 'Specific clothing items'), +('clothing/specific/top', (SELECT id FROM tag_categories WHERE name = 'clothing/specific'), 'Upper body clothing'), +('clothing/specific/bottom', (SELECT id FROM tag_categories WHERE name = 'clothing/specific'), 'Lower body clothing'), +('clothing/specific/footwear', (SELECT id FROM tag_categories WHERE name = 'clothing/specific'), 'Shoes and footwear'), +('clothing/specific/accessories', (SELECT id FROM tag_categories WHERE name = 'clothing/specific'), 'Accessories'); + +-- Position subcategories +INSERT OR IGNORE INTO tag_categories (name, parent_id, description) VALUES +('position/category', 4, 'General position category'), +('position/specific', 4, 'Specific named positions'); + +-- Action subcategories +INSERT OR IGNORE INTO tag_categories (name, parent_id, description) VALUES +('action/sexual', 5, 'Sexual acts'), +('action/non_sexual', 5, 'Non-sexual activities'); + +-- Setting subcategories +INSERT OR IGNORE INTO tag_categories (name, parent_id, description) VALUES +('setting/location', 6, 'Physical location'), +('setting/time', 6, 'Time of day'), +('setting/indoor_outdoor', 6, 'Indoor vs outdoor'); + +-- Production subcategories +INSERT OR IGNORE INTO tag_categories (name, parent_id, description) VALUES +('production/quality', 7, 'Video quality'), +('production/style', 7, 'Production style (POV, amateur, etc)'), +('production/camera', 7, 'Camera work and angles'); +` + +// SeedCommonTags contains SQL to populate common tags +const SeedCommonTags = ` +-- People count tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('solo', (SELECT id FROM tag_categories WHERE name = 'people/count'), 'One person'), +('duo', (SELECT id FROM tag_categories WHERE name = 'people/count'), 'Two people'), +('threesome', (SELECT id FROM tag_categories WHERE name = 'people/count'), 'Three people'), +('foursome', (SELECT id FROM tag_categories WHERE name = 'people/count'), 'Four people'), +('orgy', (SELECT id FROM tag_categories WHERE name = 'people/count'), 'Five or more people'); + +-- Ethnicity tags +INSERT OR IGNORE INTO tags (name, category_id, aliases, description) VALUES +('black', (SELECT id FROM tag_categories WHERE name = 'people/ethnicity'), 'ebony,african', 'Black/African descent'), +('white', (SELECT id FROM tag_categories WHERE name = 'people/ethnicity'), 'caucasian', 'White/Caucasian'), +('asian', (SELECT id FROM tag_categories WHERE name = 'people/ethnicity'), NULL, 'Asian descent'), +('latina', (SELECT id FROM tag_categories WHERE name = 'people/ethnicity'), 'hispanic', 'Hispanic/Latina'); + +-- Age category tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('teen', (SELECT id FROM tag_categories WHERE name = 'people/age_category'), '18-21 age range'), +('milf', (SELECT id FROM tag_categories WHERE name = 'people/age_category'), 'Mature woman (30-50)'), +('mature', (SELECT id FROM tag_categories WHERE name = 'people/age_category'), 'Mature (50+)'); + +-- Body type tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('slim', (SELECT id FROM tag_categories WHERE name = 'people/body_type'), 'Slim build'), +('athletic', (SELECT id FROM tag_categories WHERE name = 'people/body_type'), 'Athletic/fit build'), +('curvy', (SELECT id FROM tag_categories WHERE name = 'people/body_type'), 'Curvy build'), +('bbw', (SELECT id FROM tag_categories WHERE name = 'people/body_type'), 'Big beautiful woman'); + +-- Hair color tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('blonde', (SELECT id FROM tag_categories WHERE name = 'people/hair/color'), 'Blonde hair'), +('brunette', (SELECT id FROM tag_categories WHERE name = 'people/hair/color'), 'Brown hair'), +('redhead', (SELECT id FROM tag_categories WHERE name = 'people/hair/color'), 'Red hair'), +('black_hair', (SELECT id FROM tag_categories WHERE name = 'people/hair/color'), 'Black hair'); + +-- Clothing color tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('pink', (SELECT id FROM tag_categories WHERE name = 'clothing/color'), 'Pink clothing'), +('black', (SELECT id FROM tag_categories WHERE name = 'clothing/color'), 'Black clothing'), +('red', (SELECT id FROM tag_categories WHERE name = 'clothing/color'), 'Red clothing'), +('white', (SELECT id FROM tag_categories WHERE name = 'clothing/color'), 'White clothing'), +('blue', (SELECT id FROM tag_categories WHERE name = 'clothing/color'), 'Blue clothing'); + +-- Footwear tags +INSERT OR IGNORE INTO tags (name, category_id, aliases, description) VALUES +('heels', (SELECT id FROM tag_categories WHERE name = 'clothing/specific/footwear'), 'high heels', 'High heels'), +('boots', (SELECT id FROM tag_categories WHERE name = 'clothing/specific/footwear'), NULL, 'Boots'), +('stockings', (SELECT id FROM tag_categories WHERE name = 'clothing/specific/footwear'), 'pantyhose', 'Stockings/pantyhose'); + +-- Bottom clothing tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('panties', (SELECT id FROM tag_categories WHERE name = 'clothing/specific/bottom'), 'Panties/underwear'), +('skirt', (SELECT id FROM tag_categories WHERE name = 'clothing/specific/bottom'), 'Skirt'), +('jeans', (SELECT id FROM tag_categories WHERE name = 'clothing/specific/bottom'), 'Jeans'), +('shorts', (SELECT id FROM tag_categories WHERE name = 'clothing/specific/bottom'), 'Shorts'); + +-- Position tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('missionary', (SELECT id FROM tag_categories WHERE name = 'position/specific'), 'Missionary position'), +('doggy', (SELECT id FROM tag_categories WHERE name = 'position/specific'), 'Doggy style'), +('cowgirl', (SELECT id FROM tag_categories WHERE name = 'position/specific'), 'Cowgirl position'), +('standing', (SELECT id FROM tag_categories WHERE name = 'position/category'), 'Standing position'); + +-- Setting tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('bedroom', (SELECT id FROM tag_categories WHERE name = 'setting/location'), 'Bedroom setting'), +('office', (SELECT id FROM tag_categories WHERE name = 'setting/location'), 'Office setting'), +('outdoor', (SELECT id FROM tag_categories WHERE name = 'setting/indoor_outdoor'), 'Outdoor setting'), +('indoor', (SELECT id FROM tag_categories WHERE name = 'setting/indoor_outdoor'), 'Indoor setting'); + +-- Production quality tags +INSERT OR IGNORE INTO tags (name, category_id, description) VALUES +('hd', (SELECT id FROM tag_categories WHERE name = 'production/quality'), 'High definition (720p+)'), +('4k', (SELECT id FROM tag_categories WHERE name = 'production/quality'), '4K resolution'), +('vr', (SELECT id FROM tag_categories WHERE name = 'production/quality'), 'Virtual reality'), +('pov', (SELECT id FROM tag_categories WHERE name = 'production/style'), 'Point of view'), +('amateur', (SELECT id FROM tag_categories WHERE name = 'production/style'), 'Amateur production'), +('professional', (SELECT id FROM tag_categories WHERE name = 'production/style'), 'Professional production'); +` diff --git a/internal/db/studio_store.go b/internal/db/studio_store.go index 223cf97..66d809e 100644 --- a/internal/db/studio_store.go +++ b/internal/db/studio_store.go @@ -48,7 +48,7 @@ func (s *StudioStore) GetByID(id int64) (*model.Studio, error) { var createdAt, updatedAt string err := s.db.conn.QueryRow(` - SELECT id, name, parent_id, image_path, image_url, description, source, source_id, created_at, updated_at + SELECT id, name, COALESCE(parent_id, 0), COALESCE(image_path, ''), COALESCE(image_url, ''), COALESCE(description, ''), COALESCE(source, ''), COALESCE(source_id, ''), created_at, updated_at FROM studios WHERE id = ? `, id).Scan(&studio.ID, &studio.Name, &studio.ParentID, &studio.ImagePath, &studio.ImageURL, &studio.Description, &studio.Source, &studio.SourceID, &createdAt, &updatedAt) @@ -68,7 +68,7 @@ func (s *StudioStore) GetByID(id int64) (*model.Studio, error) { // Search searches for studios by name func (s *StudioStore) Search(query string) ([]model.Studio, error) { rows, err := s.db.conn.Query(` - SELECT id, name, parent_id, image_path, image_url, description, source, source_id, created_at, updated_at + SELECT id, name, COALESCE(parent_id, 0), COALESCE(image_path, ''), COALESCE(image_url, ''), COALESCE(description, ''), COALESCE(source, ''), COALESCE(source_id, ''), created_at, updated_at FROM studios WHERE name LIKE ? ORDER BY name @@ -124,6 +124,20 @@ func (s *StudioStore) Update(studio *model.Studio) error { return nil } +// GetSceneCount returns the number of scenes associated with a studio +func (s *StudioStore) GetSceneCount(studioID int64) (int, error) { + var count int + err := s.db.conn.QueryRow(` + SELECT COUNT(*) FROM scenes WHERE studio_id = ? + `, studioID).Scan(&count) + + if err != nil { + return 0, fmt.Errorf("failed to count scenes: %w", err) + } + + return count, nil +} + // Delete deletes a studio by ID func (s *StudioStore) Delete(id int64) error { result, err := s.db.conn.Exec("DELETE FROM studios WHERE id = ?", id) @@ -142,3 +156,46 @@ func (s *StudioStore) Delete(id int64) error { return nil } + +// Upsert inserts or updates a studio based on source_id +func (s *StudioStore) Upsert(st *model.Studio) error { + // Try to find existing studio by source_id + existing, err := s.GetBySourceID(st.Source, st.SourceID) + if err == nil && existing != nil { + // Update existing + st.ID = existing.ID + return s.Update(st) + } + // Create new + return s.Create(st) +} + +// GetBySourceID retrieves a studio by its source and source_id +func (s *StudioStore) GetBySourceID(source, sourceID string) (*model.Studio, error) { + var st model.Studio + var createdAt, updatedAt string + + err := s.db.conn.QueryRow(` + SELECT id, name, COALESCE(parent_id, 0), COALESCE(image_path, ''), COALESCE(image_url, ''), + COALESCE(description, ''), COALESCE(source, ''), COALESCE(source_id, ''), + created_at, updated_at + FROM studios + WHERE source = ? AND source_id = ? + `, source, sourceID).Scan( + &st.ID, &st.Name, &st.ParentID, &st.ImagePath, &st.ImageURL, + &st.Description, &st.Source, &st.SourceID, + &createdAt, &updatedAt, + ) + + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("failed to get studio: %w", err) + } + + st.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + st.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + + return &st, nil +} diff --git a/internal/db/sync_store.go b/internal/db/sync_store.go new file mode 100644 index 0000000..b49797b --- /dev/null +++ b/internal/db/sync_store.go @@ -0,0 +1,192 @@ +package db + +import ( + "database/sql" + "fmt" + "time" +) + +// SyncMetadata represents sync tracking information +type SyncMetadata struct { + ID int64 + EntityType string + LastSyncAt time.Time + RecordsUpdated int + RecordsFailed int + Status string + ErrorMessage string + CreatedAt time.Time + UpdatedAt time.Time +} + +// SyncStore handles sync metadata operations +type SyncStore struct { + db *DB +} + +// NewSyncStore creates a new sync store +func NewSyncStore(db *DB) *SyncStore { + return &SyncStore{db: db} +} + +// GetLastSync retrieves the last sync metadata for an entity type +func (s *SyncStore) GetLastSync(entityType string) (*SyncMetadata, error) { + var meta SyncMetadata + var lastSyncAt, createdAt, updatedAt string + var errorMessage sql.NullString + + err := s.db.conn.QueryRow(` + SELECT id, entity_type, last_sync_at, records_updated, records_failed, + status, COALESCE(error_message, ''), created_at, updated_at + FROM sync_metadata + WHERE entity_type = ? + `, entityType).Scan( + &meta.ID, &meta.EntityType, &lastSyncAt, &meta.RecordsUpdated, + &meta.RecordsFailed, &meta.Status, &errorMessage, &createdAt, &updatedAt, + ) + + if err == sql.ErrNoRows { + return nil, nil // No sync record found + } + if err != nil { + return nil, fmt.Errorf("failed to get sync metadata: %w", err) + } + + meta.LastSyncAt, _ = time.Parse(time.RFC3339, lastSyncAt) + meta.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + meta.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + meta.ErrorMessage = errorMessage.String + + return &meta, nil +} + +// CanSync checks if enough time has passed since last sync +func (s *SyncStore) CanSync(entityType string, minInterval time.Duration) (bool, time.Time, error) { + meta, err := s.GetLastSync(entityType) + if err != nil { + return false, time.Time{}, err + } + + // No previous sync + if meta == nil { + return true, time.Time{}, nil + } + + // Check if minimum interval has passed + nextAllowed := meta.LastSyncAt.Add(minInterval) + if time.Now().Before(nextAllowed) { + return false, nextAllowed, nil + } + + return true, time.Time{}, nil +} + +// RecordSyncStart records the start of a sync operation +func (s *SyncStore) RecordSyncStart(entityType string) error { + now := time.Now() + + _, err := s.db.conn.Exec(` + INSERT INTO sync_metadata (entity_type, last_sync_at, status, records_updated, records_failed, created_at, updated_at) + VALUES (?, ?, 'running', 0, 0, ?, ?) + ON CONFLICT(entity_type) DO UPDATE SET + last_sync_at = excluded.last_sync_at, + status = 'running', + records_updated = 0, + records_failed = 0, + error_message = NULL, + updated_at = excluded.updated_at + `, entityType, now.Format(time.RFC3339), now.Format(time.RFC3339), now.Format(time.RFC3339)) + + if err != nil { + return fmt.Errorf("failed to record sync start: %w", err) + } + + return nil +} + +// RecordSyncComplete records the completion of a sync operation +func (s *SyncStore) RecordSyncComplete(entityType string, updated, failed int, errMsg string) error { + now := time.Now() + status := "completed" + if failed > 0 { + status = "completed_with_errors" + } + + var errorMessage *string + if errMsg != "" { + errorMessage = &errMsg + } + + _, err := s.db.conn.Exec(` + UPDATE sync_metadata + SET status = ?, + records_updated = ?, + records_failed = ?, + error_message = ?, + updated_at = ? + WHERE entity_type = ? + `, status, updated, failed, errorMessage, now.Format(time.RFC3339), entityType) + + if err != nil { + return fmt.Errorf("failed to record sync completion: %w", err) + } + + return nil +} + +// RecordSyncError records a sync operation failure +func (s *SyncStore) RecordSyncError(entityType string, errMsg string) error { + now := time.Now() + + _, err := s.db.conn.Exec(` + UPDATE sync_metadata + SET status = 'failed', + error_message = ?, + updated_at = ? + WHERE entity_type = ? + `, errMsg, now.Format(time.RFC3339), entityType) + + if err != nil { + return fmt.Errorf("failed to record sync error: %w", err) + } + + return nil +} + +// GetAllSyncStatus retrieves sync status for all entity types +func (s *SyncStore) GetAllSyncStatus() ([]SyncMetadata, error) { + rows, err := s.db.conn.Query(` + SELECT id, entity_type, last_sync_at, records_updated, records_failed, + status, COALESCE(error_message, ''), created_at, updated_at + FROM sync_metadata + ORDER BY entity_type + `) + if err != nil { + return nil, fmt.Errorf("failed to get sync status: %w", err) + } + defer rows.Close() + + var results []SyncMetadata + for rows.Next() { + var meta SyncMetadata + var lastSyncAt, createdAt, updatedAt string + var errorMessage sql.NullString + + err := rows.Scan( + &meta.ID, &meta.EntityType, &lastSyncAt, &meta.RecordsUpdated, + &meta.RecordsFailed, &meta.Status, &errorMessage, &createdAt, &updatedAt, + ) + if err != nil { + return nil, fmt.Errorf("failed to scan sync metadata: %w", err) + } + + meta.LastSyncAt, _ = time.Parse(time.RFC3339, lastSyncAt) + meta.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + meta.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + meta.ErrorMessage = errorMessage.String + + results = append(results, meta) + } + + return results, nil +} diff --git a/internal/db/tag_store.go b/internal/db/tag_store.go index 4998d13..b5feaf5 100644 --- a/internal/db/tag_store.go +++ b/internal/db/tag_store.go @@ -25,9 +25,9 @@ func (s *TagStore) Create(tag *model.Tag) error { tag.UpdatedAt = now result, err := s.db.conn.Exec(` - INSERT INTO tags (name, source, source_id, created_at, updated_at) - VALUES (?, ?, ?, ?, ?) - `, tag.Name, tag.Source, tag.SourceID, tag.CreatedAt.Format(time.RFC3339), tag.UpdatedAt.Format(time.RFC3339)) + INSERT INTO tags (name, category_id, aliases, description, source, source_id, created_at, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) + `, tag.Name, tag.CategoryID, tag.Aliases, tag.Description, tag.Source, tag.SourceID, tag.CreatedAt.Format(time.RFC3339), tag.UpdatedAt.Format(time.RFC3339)) if err != nil { return fmt.Errorf("failed to create tag: %w", err) @@ -48,9 +48,9 @@ func (s *TagStore) GetByID(id int64) (*model.Tag, error) { var createdAt, updatedAt string err := s.db.conn.QueryRow(` - SELECT id, name, source, source_id, created_at, updated_at + SELECT id, name, category_id, COALESCE(aliases, ''), COALESCE(description, ''), COALESCE(source, ''), COALESCE(source_id, ''), created_at, updated_at FROM tags WHERE id = ? - `, id).Scan(&tag.ID, &tag.Name, &tag.Source, &tag.SourceID, &createdAt, &updatedAt) + `, id).Scan(&tag.ID, &tag.Name, &tag.CategoryID, &tag.Aliases, &tag.Description, &tag.Source, &tag.SourceID, &createdAt, &updatedAt) if err == sql.ErrNoRows { return nil, fmt.Errorf("tag not found") @@ -71,9 +71,9 @@ func (s *TagStore) GetByName(name string) (*model.Tag, error) { var createdAt, updatedAt string err := s.db.conn.QueryRow(` - SELECT id, name, source, source_id, created_at, updated_at + SELECT id, name, category_id, COALESCE(aliases, ''), COALESCE(description, ''), COALESCE(source, ''), COALESCE(source_id, ''), created_at, updated_at FROM tags WHERE name = ? - `, name).Scan(&tag.ID, &tag.Name, &tag.Source, &tag.SourceID, &createdAt, &updatedAt) + `, name).Scan(&tag.ID, &tag.Name, &tag.CategoryID, &tag.Aliases, &tag.Description, &tag.Source, &tag.SourceID, &createdAt, &updatedAt) if err == sql.ErrNoRows { return nil, fmt.Errorf("tag not found") @@ -91,11 +91,11 @@ func (s *TagStore) GetByName(name string) (*model.Tag, error) { // Search searches for tags by name func (s *TagStore) Search(query string) ([]model.Tag, error) { rows, err := s.db.conn.Query(` - SELECT id, name, source, source_id, created_at, updated_at + SELECT id, name, category_id, COALESCE(aliases, ''), COALESCE(description, ''), COALESCE(source, ''), COALESCE(source_id, ''), created_at, updated_at FROM tags - WHERE name LIKE ? + WHERE name LIKE ? OR COALESCE(aliases, '') LIKE ? ORDER BY name - `, "%"+query+"%") + `, "%"+query+"%", "%"+query+"%") if err != nil { return nil, fmt.Errorf("failed to search tags: %w", err) @@ -107,7 +107,7 @@ func (s *TagStore) Search(query string) ([]model.Tag, error) { var tag model.Tag var createdAt, updatedAt string - err := rows.Scan(&tag.ID, &tag.Name, &tag.Source, &tag.SourceID, &createdAt, &updatedAt) + err := rows.Scan(&tag.ID, &tag.Name, &tag.CategoryID, &tag.Aliases, &tag.Description, &tag.Source, &tag.SourceID, &createdAt, &updatedAt) if err != nil { return nil, fmt.Errorf("failed to scan tag: %w", err) } @@ -127,9 +127,9 @@ func (s *TagStore) Update(tag *model.Tag) error { result, err := s.db.conn.Exec(` UPDATE tags - SET name = ?, source = ?, source_id = ?, updated_at = ? + SET name = ?, category_id = ?, aliases = ?, description = ?, source = ?, source_id = ?, updated_at = ? WHERE id = ? - `, tag.Name, tag.Source, tag.SourceID, tag.UpdatedAt.Format(time.RFC3339), tag.ID) + `, tag.Name, tag.CategoryID, tag.Aliases, tag.Description, tag.Source, tag.SourceID, tag.UpdatedAt.Format(time.RFC3339), tag.ID) if err != nil { return fmt.Errorf("failed to update tag: %w", err) @@ -165,3 +165,46 @@ func (s *TagStore) Delete(id int64) error { return nil } + +// Upsert inserts or updates a tag based on source_id +func (s *TagStore) Upsert(tag *model.Tag) error { + // Try to find existing tag by source_id + existing, err := s.GetBySourceID(tag.Source, tag.SourceID) + if err == nil && existing != nil { + // Update existing + tag.ID = existing.ID + return s.Update(tag) + } + // Create new + return s.Create(tag) +} + +// GetBySourceID retrieves a tag by its source and source_id +func (s *TagStore) GetBySourceID(source, sourceID string) (*model.Tag, error) { + var tag model.Tag + var createdAt, updatedAt string + + err := s.db.conn.QueryRow(` + SELECT id, name, category_id, COALESCE(aliases, ''), COALESCE(description, ''), + COALESCE(source, ''), COALESCE(source_id, ''), + created_at, updated_at + FROM tags + WHERE source = ? AND source_id = ? + `, source, sourceID).Scan( + &tag.ID, &tag.Name, &tag.CategoryID, &tag.Aliases, &tag.Description, + &tag.Source, &tag.SourceID, + &createdAt, &updatedAt, + ) + + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("failed to get tag: %w", err) + } + + tag.CreatedAt, _ = time.Parse(time.RFC3339, createdAt) + tag.UpdatedAt, _ = time.Parse(time.RFC3339, updatedAt) + + return &tag, nil +} diff --git a/internal/import/service.go b/internal/import/service.go new file mode 100644 index 0000000..84154dd --- /dev/null +++ b/internal/import/service.go @@ -0,0 +1,383 @@ +package import_service + +import ( + "context" + "fmt" + "log" + + "git.leaktechnologies.dev/stu/Goondex/internal/db" + "git.leaktechnologies.dev/stu/Goondex/internal/model" + "git.leaktechnologies.dev/stu/Goondex/internal/scraper/tpdb" +) + +// ProgressUpdate represents a progress update during import +type ProgressUpdate struct { + EntityType string `json:"entity_type"` + Current int `json:"current"` + Total int `json:"total"` + Percent float64 `json:"percent"` + Message string `json:"message"` +} + +// ProgressCallback is called when progress is made +type ProgressCallback func(update ProgressUpdate) + +// Service handles bulk import operations +type Service struct { + db *db.DB + scraper *tpdb.Scraper +} + +// NewService creates a new import service +func NewService(database *db.DB, scraper *tpdb.Scraper) *Service { + return &Service{ + db: database, + scraper: scraper, + } +} + +// ImportResult contains the results of an import operation +type ImportResult struct { + EntityType string + Imported int + Failed int + Total int +} + +// BulkImportAllPerformers imports all performers from TPDB +func (s *Service) BulkImportAllPerformers(ctx context.Context) (*ImportResult, error) { + return s.BulkImportAllPerformersWithProgress(ctx, nil) +} + +// BulkImportAllPerformersWithProgress imports all performers from TPDB with progress updates +func (s *Service) BulkImportAllPerformersWithProgress(ctx context.Context, progress ProgressCallback) (*ImportResult, error) { + result := &ImportResult{ + EntityType: "performers", + } + + performerStore := db.NewPerformerStore(s.db) + + page := 1 + for { + performers, meta, err := s.scraper.ListPerformers(ctx, page) + if err != nil { + return result, fmt.Errorf("failed to fetch page %d: %w", page, err) + } + + // Update total on first page + if meta != nil && page == 1 { + result.Total = meta.Total + } + + // Import each performer + for _, performer := range performers { + if err := performerStore.Upsert(&performer); err != nil { + log.Printf("Failed to import performer %s: %v", performer.Name, err) + result.Failed++ + } else { + result.Imported++ + } + + // Send progress update + if progress != nil && result.Total > 0 { + progress(ProgressUpdate{ + EntityType: "performers", + Current: result.Imported, + Total: result.Total, + Percent: float64(result.Imported) / float64(result.Total) * 100, + Message: fmt.Sprintf("Imported %d/%d performers", result.Imported, result.Total), + }) + } + } + + log.Printf("Imported page %d/%d of performers (%d/%d total)", page, meta.LastPage, result.Imported, result.Total) + + // Check if we've reached the last page + if meta == nil || page >= meta.LastPage { + break + } + + page++ + } + + return result, nil +} + +// BulkImportAllStudios imports all studios from TPDB +func (s *Service) BulkImportAllStudios(ctx context.Context) (*ImportResult, error) { + return s.BulkImportAllStudiosWithProgress(ctx, nil) +} + +// BulkImportAllStudiosWithProgress imports all studios from TPDB with progress updates +func (s *Service) BulkImportAllStudiosWithProgress(ctx context.Context, progress ProgressCallback) (*ImportResult, error) { + result := &ImportResult{ + EntityType: "studios", + } + + studioStore := db.NewStudioStore(s.db) + + page := 1 + for { + studios, meta, err := s.scraper.ListStudios(ctx, page) + if err != nil { + return result, fmt.Errorf("failed to fetch page %d: %w", page, err) + } + + // Update total on first page + if meta != nil && page == 1 { + result.Total = meta.Total + } + + // Import each studio + for _, studio := range studios { + if err := studioStore.Upsert(&studio); err != nil { + log.Printf("Failed to import studio %s: %v", studio.Name, err) + result.Failed++ + } else { + result.Imported++ + } + + // Send progress update + if progress != nil && result.Total > 0 { + progress(ProgressUpdate{ + EntityType: "studios", + Current: result.Imported, + Total: result.Total, + Percent: float64(result.Imported) / float64(result.Total) * 100, + Message: fmt.Sprintf("Imported %d/%d studios", result.Imported, result.Total), + }) + } + } + + log.Printf("Imported page %d/%d of studios (%d/%d total)", page, meta.LastPage, result.Imported, result.Total) + + // Check if we've reached the last page + if meta == nil || page >= meta.LastPage { + break + } + + page++ + } + + return result, nil +} + +// BulkImportAllScenes imports all scenes from TPDB +func (s *Service) BulkImportAllScenes(ctx context.Context) (*ImportResult, error) { + return s.BulkImportAllScenesWithProgress(ctx, nil) +} + +// BulkImportAllScenesWithProgress imports all scenes from TPDB with progress updates +func (s *Service) BulkImportAllScenesWithProgress(ctx context.Context, progress ProgressCallback) (*ImportResult, error) { + result := &ImportResult{ + EntityType: "scenes", + } + + performerStore := db.NewPerformerStore(s.db) + studioStore := db.NewStudioStore(s.db) + sceneStore := db.NewSceneStore(s.db) + tagStore := db.NewTagStore(s.db) + + page := 1 + for { + scenes, meta, err := s.scraper.ListScenes(ctx, page) + if err != nil { + return result, fmt.Errorf("failed to fetch page %d: %w", page, err) + } + + // Update total on first page + if meta != nil && page == 1 { + result.Total = meta.Total + } + + // Import each scene with its performers and tags + for _, scene := range scenes { + // First import performers from the scene + for _, performer := range scene.Performers { + if err := performerStore.Upsert(&performer); err != nil { + log.Printf("Failed to import performer %s for scene %s: %v", performer.Name, scene.Title, err) + } + } + + // Import studio if present + if scene.Studio != nil { + if err := studioStore.Upsert(scene.Studio); err != nil { + log.Printf("Failed to import studio %s for scene %s: %v", scene.Studio.Name, scene.Title, err) + } + // Look up the studio ID + existingStudio, err := studioStore.GetBySourceID("tpdb", scene.Studio.SourceID) + if err == nil && existingStudio != nil { + scene.StudioID = &existingStudio.ID + } + } + + // Import tags + for _, tag := range scene.Tags { + if err := tagStore.Upsert(&tag); err != nil { + log.Printf("Failed to import tag %s for scene %s: %v", tag.Name, scene.Title, err) + } + } + + // Import the scene + if err := sceneStore.Upsert(&scene); err != nil { + log.Printf("Failed to import scene %s: %v", scene.Title, err) + result.Failed++ + continue + } + + // Get the scene ID + existingScene, err := sceneStore.GetBySourceID("tpdb", scene.SourceID) + if err != nil { + log.Printf("Failed to lookup scene %s after import: %v", scene.Title, err) + result.Failed++ + continue + } + + // Link performers to scene + for _, performer := range scene.Performers { + existingPerformer, err := performerStore.GetBySourceID("tpdb", performer.SourceID) + if err == nil && existingPerformer != nil { + if err := sceneStore.AddPerformer(existingScene.ID, existingPerformer.ID); err != nil { + log.Printf("Failed to link performer %s to scene %s: %v", performer.Name, scene.Title, err) + } + } + } + + // Link tags to scene + for _, tag := range scene.Tags { + existingTag, err := tagStore.GetBySourceID("tpdb", tag.SourceID) + if err == nil && existingTag != nil { + if err := sceneStore.AddTag(existingScene.ID, existingTag.ID); err != nil { + log.Printf("Failed to link tag %s to scene %s: %v", tag.Name, scene.Title, err) + } + } + } + + result.Imported++ + + // Send progress update + if progress != nil && result.Total > 0 { + progress(ProgressUpdate{ + EntityType: "scenes", + Current: result.Imported, + Total: result.Total, + Percent: float64(result.Imported) / float64(result.Total) * 100, + Message: fmt.Sprintf("Imported %d/%d scenes", result.Imported, result.Total), + }) + } + } + + log.Printf("Imported page %d/%d of scenes (%d/%d total)", page, meta.LastPage, result.Imported, result.Total) + + // Check if we've reached the last page + if meta == nil || page >= meta.LastPage { + break + } + + page++ + } + + return result, nil +} + +// BulkImportAll imports all data from TPDB (performers, studios, scenes) +func (s *Service) BulkImportAll(ctx context.Context) ([]ImportResult, error) { + var results []ImportResult + + log.Println("Starting bulk import of all TPDB data...") + + // Import performers first + log.Println("Importing performers...") + performerResult, err := s.BulkImportAllPerformers(ctx) + if err != nil { + return results, fmt.Errorf("failed to import performers: %w", err) + } + results = append(results, *performerResult) + + // Import studios + log.Println("Importing studios...") + studioResult, err := s.BulkImportAllStudios(ctx) + if err != nil { + return results, fmt.Errorf("failed to import studios: %w", err) + } + results = append(results, *studioResult) + + // Import scenes (with their performers and tags) + log.Println("Importing scenes...") + sceneResult, err := s.BulkImportAllScenes(ctx) + if err != nil { + return results, fmt.Errorf("failed to import scenes: %w", err) + } + results = append(results, *sceneResult) + + log.Println("Bulk import complete!") + + return results, nil +} + +// ImportScene imports a single scene with all its related data +func (s *Service) ImportScene(ctx context.Context, scene *model.Scene) error { + performerStore := db.NewPerformerStore(s.db) + studioStore := db.NewStudioStore(s.db) + sceneStore := db.NewSceneStore(s.db) + tagStore := db.NewTagStore(s.db) + + // Import performers first + for _, performer := range scene.Performers { + if err := performerStore.Upsert(&performer); err != nil { + return fmt.Errorf("failed to import performer %s: %w", performer.Name, err) + } + } + + // Import tags + for _, tag := range scene.Tags { + if err := tagStore.Upsert(&tag); err != nil { + return fmt.Errorf("failed to import tag %s: %w", tag.Name, err) + } + } + + // Import studio if present + if scene.Studio != nil { + if err := studioStore.Upsert(scene.Studio); err != nil { + return fmt.Errorf("failed to import studio %s: %w", scene.Studio.Name, err) + } + // Look up the studio ID + existingStudio, err := studioStore.GetBySourceID("tpdb", scene.Studio.SourceID) + if err == nil && existingStudio != nil { + scene.StudioID = &existingStudio.ID + } + } + + // Import the scene + if err := sceneStore.Upsert(scene); err != nil { + return fmt.Errorf("failed to import scene: %w", err) + } + + // Get the scene ID + existingScene, err := sceneStore.GetBySourceID("tpdb", scene.SourceID) + if err != nil { + return fmt.Errorf("failed to lookup scene after import: %w", err) + } + + // Link performers to scene + for _, performer := range scene.Performers { + existingPerformer, err := performerStore.GetBySourceID("tpdb", performer.SourceID) + if err == nil && existingPerformer != nil { + if err := sceneStore.AddPerformer(existingScene.ID, existingPerformer.ID); err != nil { + return fmt.Errorf("failed to link performer %s: %w", performer.Name, err) + } + } + } + + // Link tags to scene + for _, tag := range scene.Tags { + existingTag, err := tagStore.GetBySourceID("tpdb", tag.SourceID) + if err == nil && existingTag != nil { + if err := sceneStore.AddTag(existingScene.ID, existingTag.ID); err != nil { + return fmt.Errorf("failed to link tag %s: %w", tag.Name, err) + } + } + } + + return nil +} diff --git a/internal/model/movie.go b/internal/model/movie.go new file mode 100644 index 0000000..2b21873 --- /dev/null +++ b/internal/model/movie.go @@ -0,0 +1,28 @@ +package model + +import "time" + +// Movie represents a full-length movie/DVD +type Movie struct { + ID int64 `json:"id"` + Title string `json:"title"` + Date string `json:"date,omitempty"` // Release date + StudioID *int64 `json:"studio_id,omitempty"` + Description string `json:"description,omitempty"` + Director string `json:"director,omitempty"` + Duration int `json:"duration,omitempty"` // Duration in minutes + ImagePath string `json:"image_path,omitempty"` + ImageURL string `json:"image_url,omitempty"` + BackImageURL string `json:"back_image_url,omitempty"` // Back cover + URL string `json:"url,omitempty"` + Source string `json:"source,omitempty"` + SourceID string `json:"source_id,omitempty"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + + // Relationships + Scenes []Scene `json:"scenes,omitempty"` + Performers []Performer `json:"performers,omitempty"` + Tags []Tag `json:"tags,omitempty"` + Studio *Studio `json:"studio,omitempty"` +} diff --git a/internal/model/tag.go b/internal/model/tag.go index 2f5f0fd..1ce56b5 100644 --- a/internal/model/tag.go +++ b/internal/model/tag.go @@ -2,12 +2,58 @@ package model import "time" -// Tag represents a content tag/category +// TagCategory represents a hierarchical tag category +type TagCategory struct { + ID int64 `json:"id"` + Name string `json:"name"` + ParentID int64 `json:"parent_id,omitempty"` + Description string `json:"description,omitempty"` + CreatedAt time.Time `json:"created_at"` +} + +// Tag represents a content tag with category type Tag struct { + ID int64 `json:"id"` + Name string `json:"name"` + CategoryID int64 `json:"category_id"` + Aliases string `json:"aliases,omitempty"` + Description string `json:"description,omitempty"` + Source string `json:"source,omitempty"` + SourceID string `json:"source_id,omitempty"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +// SceneTag represents a tag applied to a scene with ML support +type SceneTag struct { + SceneID int64 `json:"scene_id"` + TagID int64 `json:"tag_id"` + Confidence float64 `json:"confidence"` + Source string `json:"source"` + Verified bool `json:"verified"` + CreatedAt time.Time `json:"created_at"` +} + +// SceneImage represents an image associated with a scene +type SceneImage struct { ID int64 `json:"id"` - Name string `json:"name"` + SceneID int64 `json:"scene_id"` + ImageURL string `json:"image_url"` + ImagePath string `json:"image_path,omitempty"` Source string `json:"source,omitempty"` SourceID string `json:"source_id,omitempty"` + Width int `json:"width,omitempty"` + Height int `json:"height,omitempty"` + FileSize int64 `json:"file_size,omitempty"` CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` +} + +// MLPrediction represents an ML model's tag predictions +type MLPrediction struct { + ID int64 `json:"id"` + SceneID int64 `json:"scene_id,omitempty"` + ImageID int64 `json:"image_id,omitempty"` + ModelVersion string `json:"model_version"` + Predictions string `json:"predictions"` // JSON array of {tag_id, confidence} + CreatedAt time.Time `json:"created_at"` } diff --git a/internal/scraper/adultemp/client.go b/internal/scraper/adultemp/client.go new file mode 100644 index 0000000..ea8bb0b --- /dev/null +++ b/internal/scraper/adultemp/client.go @@ -0,0 +1,187 @@ +package adultemp + +import ( + "context" + "fmt" + "io" + "net/http" + "net/http/cookiejar" + "net/url" + "time" + + "golang.org/x/net/publicsuffix" +) + +// Client handles HTTP requests to Adult Empire +type Client struct { + httpClient *http.Client + baseURL string + userAgent string +} + +// NewClient creates a new Adult Empire client +func NewClient() (*Client, error) { + // Create cookie jar for session management + jar, err := cookiejar.New(&cookiejar.Options{ + PublicSuffixList: publicsuffix.List, + }) + if err != nil { + return nil, fmt.Errorf("failed to create cookie jar: %w", err) + } + + client := &http.Client{ + Jar: jar, + Timeout: 30 * time.Second, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + // Allow up to 10 redirects + if len(via) >= 10 { + return fmt.Errorf("stopped after 10 redirects") + } + return nil + }, + } + + c := &Client{ + httpClient: client, + baseURL: "https://www.adultempire.com", + userAgent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", + } + + // Set age confirmation cookie by default + if err := c.setAgeConfirmation(); err != nil { + return nil, fmt.Errorf("failed to set age confirmation: %w", err) + } + + return c, nil +} + +// setAgeConfirmation sets the age confirmation cookie required to view Adult Empire content +func (c *Client) setAgeConfirmation() error { + u, err := url.Parse(c.baseURL) + if err != nil { + return err + } + + cookies := []*http.Cookie{ + { + Name: "ageConfirmed", + Value: "1", + Domain: ".adultempire.com", + Path: "/", + }, + } + + c.httpClient.Jar.SetCookies(u, cookies) + return nil +} + +// SetAuthToken sets the authentication token for Adult Empire +// etoken is the session cookie from an authenticated Adult Empire session +func (c *Client) SetAuthToken(etoken string) error { + u, err := url.Parse(c.baseURL) + if err != nil { + return err + } + + // Set the etoken cookie + cookies := []*http.Cookie{ + { + Name: "etoken", + Value: etoken, + Domain: ".adultempire.com", + Path: "/", + }, + { + Name: "ageConfirmed", + Value: "1", + Domain: ".adultempire.com", + Path: "/", + }, + } + + c.httpClient.Jar.SetCookies(u, cookies) + return nil +} + +// Get performs a GET request to the specified path +func (c *Client) Get(ctx context.Context, path string) ([]byte, error) { + fullURL := c.baseURL + path + + req, err := http.NewRequestWithContext(ctx, "GET", fullURL, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("User-Agent", c.userAgent) + req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8") + req.Header.Set("Accept-Language", "en-US,en;q=0.5") + + resp, err := c.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("request failed: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response body: %w", err) + } + + return body, nil +} + +// SearchScenes searches for scenes by query +func (c *Client) SearchScenes(ctx context.Context, query string) ([]byte, error) { + // Adult Empire search URL format + searchPath := fmt.Sprintf("/dvd/search?q=%s", url.QueryEscape(query)) + return c.Get(ctx, searchPath) +} + +// SearchPerformers searches for performers by name +func (c *Client) SearchPerformers(ctx context.Context, name string) ([]byte, error) { + // Adult Empire performer search + searchPath := fmt.Sprintf("/performer/search?q=%s", url.QueryEscape(name)) + return c.Get(ctx, searchPath) +} + +// GetSceneByURL fetches a scene page by its URL +func (c *Client) GetSceneByURL(ctx context.Context, sceneURL string) ([]byte, error) { + // Parse the URL to get just the path + u, err := url.Parse(sceneURL) + if err != nil { + return nil, fmt.Errorf("invalid URL: %w", err) + } + + // If it's a full URL, use the path; otherwise use as-is + path := sceneURL + if u.Host != "" { + path = u.Path + if u.RawQuery != "" { + path += "?" + u.RawQuery + } + } + + return c.Get(ctx, path) +} + +// GetPerformerByURL fetches a performer page by its URL +func (c *Client) GetPerformerByURL(ctx context.Context, performerURL string) ([]byte, error) { + u, err := url.Parse(performerURL) + if err != nil { + return nil, fmt.Errorf("invalid URL: %w", err) + } + + path := performerURL + if u.Host != "" { + path = u.Path + if u.RawQuery != "" { + path += "?" + u.RawQuery + } + } + + return c.Get(ctx, path) +} diff --git a/internal/scraper/adultemp/scraper.go b/internal/scraper/adultemp/scraper.go new file mode 100644 index 0000000..cac27cf --- /dev/null +++ b/internal/scraper/adultemp/scraper.go @@ -0,0 +1,309 @@ +package adultemp + +import ( + "context" + "fmt" + "strings" + + "git.leaktechnologies.dev/stu/Goondex/internal/model" +) + +// Scraper implements Adult Empire scraping functionality +type Scraper struct { + client *Client +} + +// NewScraper creates a new Adult Empire scraper +func NewScraper() (*Scraper, error) { + client, err := NewClient() + if err != nil { + return nil, err + } + + return &Scraper{ + client: client, + }, nil +} + +// SetAuthToken sets the authentication token for the scraper +func (s *Scraper) SetAuthToken(etoken string) error { + return s.client.SetAuthToken(etoken) +} + +// ScrapeSceneByURL scrapes a scene from its Adult Empire URL +func (s *Scraper) ScrapeSceneByURL(ctx context.Context, url string) (*SceneData, error) { + html, err := s.client.GetSceneByURL(ctx, url) + if err != nil { + return nil, fmt.Errorf("failed to fetch scene: %w", err) + } + + parser, err := NewXPathParser(html) + if err != nil { + return nil, fmt.Errorf("failed to parse HTML: %w", err) + } + + scene := &SceneData{ + URL: url, + } + + // Extract title + scene.Title = parser.QueryString("//h1[@class='title']") + + // Extract date + dateStr := parser.QueryString("//div[@class='release-date']/text()") + scene.Date = ParseDate(dateStr) + + // Extract studio + scene.Studio = parser.QueryString("//a[contains(@href, '/studio/')]/text()") + + // Extract cover image + scene.Image = ExtractURL( + parser.QueryAttr("//div[@class='item-image']//img", "src"), + s.client.baseURL, + ) + + // Extract description + desc := parser.QueryString("//div[@class='synopsis']") + scene.Description = CleanText(desc) + + // Extract performers + scene.Performers = parser.QueryStrings("//a[contains(@href, '/performer/')]/text()") + + // Extract tags/categories + scene.Tags = parser.QueryStrings("//a[contains(@href, '/category/')]/text()") + + // Extract code/SKU + scene.Code = parser.QueryString("//span[@class='sku']/text()") + + // Extract director + scene.Director = parser.QueryString("//a[contains(@href, '/director/')]/text()") + + return scene, nil +} + +// SearchScenesByName searches for scenes by title +func (s *Scraper) SearchScenesByName(ctx context.Context, query string) ([]SearchResult, error) { + html, err := s.client.SearchScenes(ctx, query) + if err != nil { + return nil, fmt.Errorf("failed to search scenes: %w", err) + } + + parser, err := NewXPathParser(html) + if err != nil { + return nil, fmt.Errorf("failed to parse HTML: %w", err) + } + + var results []SearchResult + + // Extract search result items using official Stash scraper XPath + // Title: //a[@class="boxcover"]/img/@title + // URL: //a[@class="boxcover"]/@href + // Image: //a[@class="boxcover"]/img/@src + titles := parser.QueryAttrs("//a[@class='boxcover']/img", "title") + urls := parser.QueryAttrs("//a[@class='boxcover']", "href") + images := parser.QueryAttrs("//a[@class='boxcover']/img", "src") + + for i := range titles { + result := SearchResult{ + Title: titles[i], + } + + if i < len(urls) { + result.URL = ExtractURL(urls[i], s.client.baseURL) + } + + if i < len(images) { + result.Image = ExtractURL(images[i], s.client.baseURL) + } + + results = append(results, result) + } + + return results, nil +} + +// ScrapePerformerByURL scrapes a performer from their Adult Empire URL +func (s *Scraper) ScrapePerformerByURL(ctx context.Context, url string) (*PerformerData, error) { + html, err := s.client.GetPerformerByURL(ctx, url) + if err != nil { + return nil, fmt.Errorf("failed to fetch performer: %w", err) + } + + parser, err := NewXPathParser(html) + if err != nil { + return nil, fmt.Errorf("failed to parse HTML: %w", err) + } + + performer := &PerformerData{ + URL: url, + } + + // Extract name + performer.Name = parser.QueryString("//h1[@class='performer-name']") + + // Extract image + performer.Image = ExtractURL( + parser.QueryAttr("//div[@class='performer-image']//img", "src"), + s.client.baseURL, + ) + + // Extract birthdate + performer.Birthdate = parser.QueryString("//span[@class='birthdate']/text()") + + // Extract ethnicity + performer.Ethnicity = parser.QueryString("//span[@class='ethnicity']/text()") + + // Extract country + performer.Country = parser.QueryString("//span[@class='country']/text()") + + // Extract height + heightStr := parser.QueryString("//span[@class='height']/text()") + if heightStr != "" { + height := ParseHeight(heightStr) + if height > 0 { + performer.Height = fmt.Sprintf("%d cm", height) + } + } + + // Extract measurements + performer.Measurements = parser.QueryString("//span[@class='measurements']/text()") + + // Extract hair color + performer.HairColor = parser.QueryString("//span[@class='hair-color']/text()") + + // Extract eye color + performer.EyeColor = parser.QueryString("//span[@class='eye-color']/text()") + + // Extract biography + bio := parser.QueryString("//div[@class='bio']") + performer.Biography = CleanText(bio) + + // Extract aliases + aliasStr := parser.QueryString("//span[@class='aliases']/text()") + if aliasStr != "" { + // Split by comma + for _, alias := range splitByComma(aliasStr) { + performer.Aliases = append(performer.Aliases, alias) + } + } + + return performer, nil +} + +// SearchPerformersByName searches for performers by name +func (s *Scraper) SearchPerformersByName(ctx context.Context, name string) ([]SearchResult, error) { + html, err := s.client.SearchPerformers(ctx, name) + if err != nil { + return nil, fmt.Errorf("failed to search performers: %w", err) + } + + parser, err := NewXPathParser(html) + if err != nil { + return nil, fmt.Errorf("failed to parse HTML: %w", err) + } + + var results []SearchResult + + // Extract performer search results using official Stash scraper XPath + // Root: //div[@id="performerlist"]//a + // Name: @label attribute + // URL: @href attribute + names := parser.QueryAttrs("//div[@id='performerlist']//a", "label") + urls := parser.QueryAttrs("//div[@id='performerlist']//a", "href") + images := parser.QueryAttrs("//div[@id='performerlist']//a//img", "src") + + for i := range names { + result := SearchResult{ + Title: names[i], + } + + if i < len(urls) { + result.URL = ExtractURL(urls[i], s.client.baseURL) + } + + if i < len(images) { + result.Image = ExtractURL(images[i], s.client.baseURL) + } + + results = append(results, result) + } + + return results, nil +} + +// ConvertSceneToModel converts SceneData to Goondex model.Scene +func (s *Scraper) ConvertSceneToModel(data *SceneData) *model.Scene { + scene := &model.Scene{ + Title: data.Title, + URL: data.URL, + Date: data.Date, + Description: data.Description, + ImageURL: data.Image, + Code: data.Code, + Director: data.Director, + Source: "adultemp", + SourceID: ExtractID(data.URL), + } + + // Studio will need to be looked up/created separately + // Performers will need to be looked up/created separately + // Tags will need to be looked up/created separately + + return scene +} + +// ConvertPerformerToModel converts PerformerData to Goondex model.Performer +func (s *Scraper) ConvertPerformerToModel(data *PerformerData) *model.Performer { + performer := &model.Performer{ + Name: data.Name, + ImageURL: data.Image, + Birthday: data.Birthdate, + Ethnicity: data.Ethnicity, + Country: data.Country, + Measurements: data.Measurements, + HairColor: data.HairColor, + EyeColor: data.EyeColor, + Bio: data.Biography, + Source: "adultemp", + SourceID: ExtractID(data.URL), + } + + // Parse height if available + if data.Height != "" { + height := ParseHeight(data.Height) + if height > 0 { + performer.Height = height + } + } + + // Join aliases + if len(data.Aliases) > 0 { + performer.Aliases = joinStrings(data.Aliases, ", ") + } + + return performer +} + +// Helper functions + +func splitByComma(s string) []string { + var result []string + parts := strings.Split(s, ",") + for _, part := range parts { + trimmed := strings.TrimSpace(part) + if trimmed != "" { + result = append(result, trimmed) + } + } + return result +} + +func joinStrings(strs []string, sep string) string { + var nonEmpty []string + for _, s := range strs { + if s != "" { + nonEmpty = append(nonEmpty, s) + } + } + return strings.Join(nonEmpty, sep) +} diff --git a/internal/scraper/adultemp/types.go b/internal/scraper/adultemp/types.go new file mode 100644 index 0000000..c40ef66 --- /dev/null +++ b/internal/scraper/adultemp/types.go @@ -0,0 +1,58 @@ +package adultemp + +// AdultEmpire scraper types and structures +// Based on the Stash Adult Empire scraper implementation + +// SearchResult represents a search result from Adult Empire +type SearchResult struct { + Title string + URL string + Image string + Year string +} + +// SceneData represents a scene scraped from Adult Empire +type SceneData struct { + Title string + URL string + Date string + Studio string + Image string + Description string + Performers []string + Tags []string + Code string + Director string +} + +// PerformerData represents a performer scraped from Adult Empire +type PerformerData struct { + Name string + URL string + Image string + Birthdate string + Ethnicity string + Country string + Height string + Measurements string + HairColor string + EyeColor string + Biography string + Aliases []string +} + +// MovieData represents a movie/group from Adult Empire +type MovieData struct { + Title string + URL string + Date string + Studio string + FrontImage string + BackImage string + Description string + Director string + Duration string + Performers []string + Tags []string + Code string +} diff --git a/internal/scraper/adultemp/xpath.go b/internal/scraper/adultemp/xpath.go new file mode 100644 index 0000000..93005c7 --- /dev/null +++ b/internal/scraper/adultemp/xpath.go @@ -0,0 +1,185 @@ +package adultemp + +import ( + "bytes" + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/antchfx/htmlquery" + "golang.org/x/net/html" +) + +// XPathParser handles XPath parsing of Adult Empire pages +type XPathParser struct { + doc *html.Node +} + +// NewXPathParser creates a new XPath parser from HTML bytes +func NewXPathParser(htmlContent []byte) (*XPathParser, error) { + doc, err := htmlquery.Parse(bytes.NewReader(htmlContent)) + if err != nil { + return nil, fmt.Errorf("failed to parse HTML: %w", err) + } + + return &XPathParser{doc: doc}, nil +} + +// QueryString extracts a single string value using XPath +func (p *XPathParser) QueryString(xpath string) string { + node := htmlquery.FindOne(p.doc, xpath) + if node == nil { + return "" + } + return strings.TrimSpace(htmlquery.InnerText(node)) +} + +// QueryAttr extracts an attribute value using XPath +func (p *XPathParser) QueryAttr(xpath, attr string) string { + node := htmlquery.FindOne(p.doc, xpath) + if node == nil { + return "" + } + for _, a := range node.Attr { + if a.Key == attr { + return strings.TrimSpace(a.Val) + } + } + return "" +} + +// QueryStrings extracts multiple string values using XPath +func (p *XPathParser) QueryStrings(xpath string) []string { + nodes := htmlquery.Find(p.doc, xpath) + var results []string + for _, node := range nodes { + text := strings.TrimSpace(htmlquery.InnerText(node)) + if text != "" { + results = append(results, text) + } + } + return results +} + +// QueryAttrs extracts multiple attribute values using XPath +func (p *XPathParser) QueryAttrs(xpath, attr string) []string { + nodes := htmlquery.Find(p.doc, xpath) + var results []string + for _, node := range nodes { + for _, a := range node.Attr { + if a.Key == attr { + val := strings.TrimSpace(a.Val) + if val != "" { + results = append(results, val) + } + break + } + } + } + return results +} + +// Helper functions for common parsing tasks + +// ParseDate converts various date formats to YYYY-MM-DD +func ParseDate(dateStr string) string { + dateStr = strings.TrimSpace(dateStr) + if dateStr == "" { + return "" + } + + // Try to extract date in various formats + // Format: "Jan 02, 2006" -> "2006-01-02" + // Format: "2006-01-02" -> "2006-01-02" + + // If already in YYYY-MM-DD format + if matched, _ := regexp.MatchString(`^\d{4}-\d{2}-\d{2}$`, dateStr); matched { + return dateStr + } + + // Common Adult Empire format: "Jan 02, 2006" + // We'll return it as-is for now and let the caller handle conversion + return dateStr +} + +// ParseHeight converts height strings to centimeters +// Example: "5'6\"" -> "168" +func ParseHeight(heightStr string) int { + heightStr = strings.TrimSpace(heightStr) + if heightStr == "" { + return 0 + } + + // Parse feet and inches + re := regexp.MustCompile(`(\d+)'(\d+)"?`) + matches := re.FindStringSubmatch(heightStr) + if len(matches) == 3 { + feet, _ := strconv.Atoi(matches[1]) + inches, _ := strconv.Atoi(matches[2]) + totalInches := feet*12 + inches + cm := int(float64(totalInches) * 2.54) + return cm + } + + // Try to extract just a number with "cm" + if strings.Contains(heightStr, "cm") { + re = regexp.MustCompile(`(\d+)`) + matches = re.FindStringSubmatch(heightStr) + if len(matches) > 0 { + cm, _ := strconv.Atoi(matches[0]) + return cm + } + } + + return 0 +} + +// CleanText removes "Show More/Less" text and extra whitespace +func CleanText(text string) string { + text = strings.TrimSpace(text) + + // Remove "Show More" / "Show Less" buttons + text = regexp.MustCompile(`(?i)show\s+(more|less)`).ReplaceAllString(text, "") + + // Remove extra whitespace + text = regexp.MustCompile(`\s+`).ReplaceAllString(text, " ") + + return strings.TrimSpace(text) +} + +// ExtractURL ensures a URL is complete +func ExtractURL(rawURL, baseURL string) string { + rawURL = strings.TrimSpace(rawURL) + if rawURL == "" { + return "" + } + + // If it's already a full URL + if strings.HasPrefix(rawURL, "http://") || strings.HasPrefix(rawURL, "https://") { + return rawURL + } + + // If it starts with //, add https: + if strings.HasPrefix(rawURL, "//") { + return "https:" + rawURL + } + + // If it's a relative path, prepend base URL + if strings.HasPrefix(rawURL, "/") { + return baseURL + rawURL + } + + return rawURL +} + +// ExtractID extracts numeric ID from URL +// Example: "/123456/scene-name" -> "123456" +func ExtractID(urlPath string) string { + re := regexp.MustCompile(`/(\d+)/`) + matches := re.FindStringSubmatch(urlPath) + if len(matches) > 1 { + return matches[1] + } + return "" +} diff --git a/internal/scraper/merger/performer_merger.go b/internal/scraper/merger/performer_merger.go new file mode 100644 index 0000000..87a1711 --- /dev/null +++ b/internal/scraper/merger/performer_merger.go @@ -0,0 +1,129 @@ +package merger + +import ( + "fmt" + "strings" + + "git.leaktechnologies.dev/stu/Goondex/internal/model" + "git.leaktechnologies.dev/stu/Goondex/internal/scraper/adultemp" +) + +// MergePerformerData intelligently combines data from multiple sources +// Priority: TPDB data is primary, Adult Empire fills in gaps or provides additional context +func MergePerformerData(tpdbPerformer *model.Performer, adultempData *adultemp.PerformerData) *model.Performer { + merged := tpdbPerformer + + // Fill in missing fields from Adult Empire + if merged.Birthday == "" && adultempData.Birthdate != "" { + merged.Birthday = adultempData.Birthdate + } + + if merged.Ethnicity == "" && adultempData.Ethnicity != "" { + merged.Ethnicity = adultempData.Ethnicity + } + + if merged.Country == "" && adultempData.Country != "" { + merged.Country = adultempData.Country + } + + if merged.HairColor == "" && adultempData.HairColor != "" { + merged.HairColor = adultempData.HairColor + } + + if merged.EyeColor == "" && adultempData.EyeColor != "" { + merged.EyeColor = adultempData.EyeColor + } + + if merged.Measurements == "" && adultempData.Measurements != "" { + merged.Measurements = adultempData.Measurements + } + + // Height: prefer TPDB if available, otherwise use Adult Empire + if merged.Height == 0 && adultempData.Height != "" { + // Parse height from Adult Empire format (e.g., "168 cm") + // This is already converted by the Adult Empire scraper + // We just need to extract the numeric value + var height int + if _, err := fmt.Sscanf(adultempData.Height, "%d cm", &height); err == nil { + merged.Height = height + } + } + + // Bio: Combine if both exist, otherwise use whichever is available + if merged.Bio == "" && adultempData.Biography != "" { + merged.Bio = adultempData.Biography + } else if merged.Bio != "" && adultempData.Biography != "" { + // If both exist and are different, append Adult Empire bio + if !strings.Contains(merged.Bio, adultempData.Biography) { + merged.Bio = merged.Bio + "\n\n[Adult Empire]: " + adultempData.Biography + } + } + + // Aliases: Merge unique aliases + if len(adultempData.Aliases) > 0 { + aliasesStr := strings.Join(adultempData.Aliases, ", ") + if merged.Aliases == "" { + merged.Aliases = aliasesStr + } else { + // Add new aliases that aren't already present + existingAliases := strings.Split(merged.Aliases, ",") + existingMap := make(map[string]bool) + for _, alias := range existingAliases { + existingMap[strings.TrimSpace(alias)] = true + } + + for _, newAlias := range adultempData.Aliases { + trimmed := strings.TrimSpace(newAlias) + if !existingMap[trimmed] { + merged.Aliases += ", " + trimmed + } + } + } + } + + // Image URL: prefer TPDB, but keep Adult Empire as fallback reference + // We don't override TPDB images as they're generally higher quality + if merged.ImageURL == "" && adultempData.Image != "" { + merged.ImageURL = adultempData.Image + } + + return merged +} + +// ShouldMerge determines if two performers are likely the same person +// Returns true if names match closely enough +func ShouldMerge(performer1Name, performer2Name string) bool { + name1 := strings.ToLower(strings.TrimSpace(performer1Name)) + name2 := strings.ToLower(strings.TrimSpace(performer2Name)) + + // Exact match + if name1 == name2 { + return true + } + + // Check if one name is contained in the other + // (e.g., "Riley Reid" and "Riley Red" should not match, + // but "Riley Reid" and "Reid, Riley" should) + words1 := strings.Fields(name1) + words2 := strings.Fields(name2) + + // If all words from one name are in the other, consider it a match + matchCount := 0 + for _, word1 := range words1 { + for _, word2 := range words2 { + if word1 == word2 { + matchCount++ + break + } + } + } + + // At least 70% of words must match + threshold := 0.7 + maxWords := len(words1) + if len(words2) > maxWords { + maxWords = len(words2) + } + + return float64(matchCount)/float64(maxWords) >= threshold +} diff --git a/internal/scraper/tpdb/scraper.go b/internal/scraper/tpdb/scraper.go index 3cb93f4..1c897ee 100644 --- a/internal/scraper/tpdb/scraper.go +++ b/internal/scraper/tpdb/scraper.go @@ -172,3 +172,87 @@ func (s *Scraper) GetStudioByID(ctx context.Context, remoteID string) (*model.St studio := mapStudio(tpdbStudio) return &studio, nil } + +// ListPerformers fetches all performers with pagination +func (s *Scraper) ListPerformers(ctx context.Context, page int) ([]model.Performer, *MetaData, error) { + params := url.Values{} + params.Set("page", fmt.Sprintf("%d", page)) + + body, err := s.client.get(ctx, "/performers", params) + if err != nil { + return nil, nil, fmt.Errorf("failed to list performers: %w", err) + } + + var apiResp APIResponse + if err := json.Unmarshal(body, &apiResp); err != nil { + return nil, nil, fmt.Errorf("failed to parse response: %w", err) + } + + var tpdbPerformers []PerformerResponse + if err := json.Unmarshal(apiResp.Data, &tpdbPerformers); err != nil { + return nil, nil, fmt.Errorf("failed to parse performers: %w", err) + } + + performers := make([]model.Performer, 0, len(tpdbPerformers)) + for _, p := range tpdbPerformers { + performers = append(performers, mapPerformer(p)) + } + + return performers, apiResp.Meta, nil +} + +// ListStudios fetches all studios with pagination +func (s *Scraper) ListStudios(ctx context.Context, page int) ([]model.Studio, *MetaData, error) { + params := url.Values{} + params.Set("page", fmt.Sprintf("%d", page)) + + body, err := s.client.get(ctx, "/sites", params) + if err != nil { + return nil, nil, fmt.Errorf("failed to list studios: %w", err) + } + + var apiResp APIResponse + if err := json.Unmarshal(body, &apiResp); err != nil { + return nil, nil, fmt.Errorf("failed to parse response: %w", err) + } + + var tpdbStudios []StudioResponse + if err := json.Unmarshal(apiResp.Data, &tpdbStudios); err != nil { + return nil, nil, fmt.Errorf("failed to parse studios: %w", err) + } + + studios := make([]model.Studio, 0, len(tpdbStudios)) + for _, st := range tpdbStudios { + studios = append(studios, mapStudio(st)) + } + + return studios, apiResp.Meta, nil +} + +// ListScenes fetches all scenes with pagination +func (s *Scraper) ListScenes(ctx context.Context, page int) ([]model.Scene, *MetaData, error) { + params := url.Values{} + params.Set("page", fmt.Sprintf("%d", page)) + + body, err := s.client.get(ctx, "/scenes", params) + if err != nil { + return nil, nil, fmt.Errorf("failed to list scenes: %w", err) + } + + var apiResp APIResponse + if err := json.Unmarshal(body, &apiResp); err != nil { + return nil, nil, fmt.Errorf("failed to parse response: %w", err) + } + + var tpdbScenes []SceneResponse + if err := json.Unmarshal(apiResp.Data, &tpdbScenes); err != nil { + return nil, nil, fmt.Errorf("failed to parse scenes: %w", err) + } + + scenes := make([]model.Scene, 0, len(tpdbScenes)) + for _, sc := range tpdbScenes { + scenes = append(scenes, mapScene(sc)) + } + + return scenes, apiResp.Meta, nil +} diff --git a/internal/sync/service.go b/internal/sync/service.go new file mode 100644 index 0000000..b153f93 --- /dev/null +++ b/internal/sync/service.go @@ -0,0 +1,295 @@ +package sync + +import ( + "context" + "fmt" + "time" + + "git.leaktechnologies.dev/stu/Goondex/internal/db" + "git.leaktechnologies.dev/stu/Goondex/internal/scraper/tpdb" +) + +// Service handles synchronization operations +type Service struct { + db *db.DB + scraper *tpdb.Scraper +} + +// NewService creates a new sync service +func NewService(database *db.DB, scraper *tpdb.Scraper) *Service { + return &Service{ + db: database, + scraper: scraper, + } +} + +// SyncOptions configures sync behavior +type SyncOptions struct { + Force bool // Force sync even if rate limit not met + MinInterval time.Duration // Minimum time between syncs +} + +// DefaultSyncOptions returns default sync options (1 hour minimum) +func DefaultSyncOptions() SyncOptions { + return SyncOptions{ + Force: false, + MinInterval: 1 * time.Hour, + } +} + +// SyncResult contains the results of a sync operation +type SyncResult struct { + EntityType string + Updated int + Failed int + Skipped int + Duration time.Duration + ErrorMessage string +} + +// SyncAll syncs all entity types (performers, studios, scenes) +func (s *Service) SyncAll(ctx context.Context, opts SyncOptions) ([]SyncResult, error) { + var results []SyncResult + + // Sync performers + performerResult, err := s.SyncPerformers(ctx, opts) + if err != nil { + return results, fmt.Errorf("failed to sync performers: %w", err) + } + results = append(results, performerResult) + + // Sync studios + studioResult, err := s.SyncStudios(ctx, opts) + if err != nil { + return results, fmt.Errorf("failed to sync studios: %w", err) + } + results = append(results, studioResult) + + // Sync scenes + sceneResult, err := s.SyncScenes(ctx, opts) + if err != nil { + return results, fmt.Errorf("failed to sync scenes: %w", err) + } + results = append(results, sceneResult) + + return results, nil +} + +// SyncPerformers syncs all performers from TPDB +func (s *Service) SyncPerformers(ctx context.Context, opts SyncOptions) (SyncResult, error) { + result := SyncResult{EntityType: "performers"} + start := time.Now() + defer func() { result.Duration = time.Since(start) }() + + syncStore := db.NewSyncStore(s.db) + + // Check rate limiting + if !opts.Force { + canSync, nextAllowed, err := syncStore.CanSync("performers", opts.MinInterval) + if err != nil { + return result, err + } + if !canSync { + result.Skipped = 1 + result.ErrorMessage = fmt.Sprintf("Rate limit: next sync allowed at %s", nextAllowed.Format(time.RFC3339)) + return result, nil + } + } + + // Record sync start + if err := syncStore.RecordSyncStart("performers"); err != nil { + return result, err + } + + performerStore := db.NewPerformerStore(s.db) + + // Get all performers with TPDB source + performers, err := performerStore.Search("") + if err != nil { + syncStore.RecordSyncError("performers", err.Error()) + return result, err + } + + // Update each performer + for _, p := range performers { + if p.Source != "tpdb" || p.SourceID == "" { + result.Skipped++ + continue + } + + // Fetch updated data from TPDB + updated, err := s.scraper.GetPerformerByID(ctx, p.SourceID) + if err != nil { + fmt.Printf("⚠ Failed to fetch performer %s (ID: %d): %v\n", p.Name, p.ID, err) + result.Failed++ + continue + } + + // Preserve local ID + updated.ID = p.ID + + // Update in database + if err := performerStore.Update(updated); err != nil { + fmt.Printf("⚠ Failed to update performer %s (ID: %d): %v\n", p.Name, p.ID, err) + result.Failed++ + continue + } + + result.Updated++ + } + + // Record completion + if err := syncStore.RecordSyncComplete("performers", result.Updated, result.Failed, result.ErrorMessage); err != nil { + return result, err + } + + return result, nil +} + +// SyncStudios syncs all studios from TPDB +func (s *Service) SyncStudios(ctx context.Context, opts SyncOptions) (SyncResult, error) { + result := SyncResult{EntityType: "studios"} + start := time.Now() + defer func() { result.Duration = time.Since(start) }() + + syncStore := db.NewSyncStore(s.db) + + // Check rate limiting + if !opts.Force { + canSync, nextAllowed, err := syncStore.CanSync("studios", opts.MinInterval) + if err != nil { + return result, err + } + if !canSync { + result.Skipped = 1 + result.ErrorMessage = fmt.Sprintf("Rate limit: next sync allowed at %s", nextAllowed.Format(time.RFC3339)) + return result, nil + } + } + + // Record sync start + if err := syncStore.RecordSyncStart("studios"); err != nil { + return result, err + } + + studioStore := db.NewStudioStore(s.db) + + // Get all studios with TPDB source + studios, err := studioStore.Search("") + if err != nil { + syncStore.RecordSyncError("studios", err.Error()) + return result, err + } + + // Update each studio + for _, st := range studios { + if st.Source != "tpdb" || st.SourceID == "" { + result.Skipped++ + continue + } + + // Fetch updated data from TPDB + updated, err := s.scraper.GetStudioByID(ctx, st.SourceID) + if err != nil { + fmt.Printf("⚠ Failed to fetch studio %s (ID: %d): %v\n", st.Name, st.ID, err) + result.Failed++ + continue + } + + // Preserve local ID + updated.ID = st.ID + + // Update in database + if err := studioStore.Update(updated); err != nil { + fmt.Printf("⚠ Failed to update studio %s (ID: %d): %v\n", st.Name, st.ID, err) + result.Failed++ + continue + } + + result.Updated++ + } + + // Record completion + if err := syncStore.RecordSyncComplete("studios", result.Updated, result.Failed, result.ErrorMessage); err != nil { + return result, err + } + + return result, nil +} + +// SyncScenes syncs all scenes from TPDB +func (s *Service) SyncScenes(ctx context.Context, opts SyncOptions) (SyncResult, error) { + result := SyncResult{EntityType: "scenes"} + start := time.Now() + defer func() { result.Duration = time.Since(start) }() + + syncStore := db.NewSyncStore(s.db) + + // Check rate limiting + if !opts.Force { + canSync, nextAllowed, err := syncStore.CanSync("scenes", opts.MinInterval) + if err != nil { + return result, err + } + if !canSync { + result.Skipped = 1 + result.ErrorMessage = fmt.Sprintf("Rate limit: next sync allowed at %s", nextAllowed.Format(time.RFC3339)) + return result, nil + } + } + + // Record sync start + if err := syncStore.RecordSyncStart("scenes"); err != nil { + return result, err + } + + sceneStore := db.NewSceneStore(s.db) + + // Get all scenes with TPDB source + scenes, err := sceneStore.Search("") + if err != nil { + syncStore.RecordSyncError("scenes", err.Error()) + return result, err + } + + // Update each scene + for _, sc := range scenes { + if sc.Source != "tpdb" || sc.SourceID == "" { + result.Skipped++ + continue + } + + // Fetch updated data from TPDB + updated, err := s.scraper.GetSceneByID(ctx, sc.SourceID) + if err != nil { + fmt.Printf("⚠ Failed to fetch scene %s (ID: %d): %v\n", sc.Title, sc.ID, err) + result.Failed++ + continue + } + + // Preserve local ID + updated.ID = sc.ID + + // Update in database + if err := sceneStore.Update(updated); err != nil { + fmt.Printf("⚠ Failed to update scene %s (ID: %d): %v\n", sc.Title, sc.ID, err) + result.Failed++ + continue + } + + result.Updated++ + } + + // Record completion + if err := syncStore.RecordSyncComplete("scenes", result.Updated, result.Failed, result.ErrorMessage); err != nil { + return result, err + } + + return result, nil +} + +// GetSyncStatus returns the current sync status for all entity types +func (s *Service) GetSyncStatus() ([]db.SyncMetadata, error) { + syncStore := db.NewSyncStore(s.db) + return syncStore.GetAllSyncStatus() +} diff --git a/internal/web/server.go b/internal/web/server.go new file mode 100644 index 0000000..e62e376 --- /dev/null +++ b/internal/web/server.go @@ -0,0 +1,1089 @@ +package web + +import ( + "context" + "embed" + "encoding/json" + "fmt" + "html/template" + "net/http" + "os" + "strconv" + "time" + "io/fs" + + "git.leaktechnologies.dev/stu/Goondex/internal/db" + import_service "git.leaktechnologies.dev/stu/Goondex/internal/import" + "git.leaktechnologies.dev/stu/Goondex/internal/model" + "git.leaktechnologies.dev/stu/Goondex/internal/scraper/tpdb" + "git.leaktechnologies.dev/stu/Goondex/internal/sync" +) + +// ============================================================================ +// EMBED STATIC + TEMPLATES +// ============================================================================ + +//go:embed templates/* static/**/* +var content embed.FS + +type Server struct { + db *db.DB + templates *template.Template + addr string +} + +func NewServer(database *db.DB, addr string) (*Server, error) { + tmpl, err := template.ParseFS(content, "templates/*.html") + if err != nil { + return nil, fmt.Errorf("failed to parse templates: %w", err) + } + + return &Server{ + db: database, + templates: tmpl, + addr: addr, + }, nil +} + +func (s *Server) Start() error { + mux := http.NewServeMux() + + // ============================================================================ + // FIXED STATIC SERVER — THIS IS THE CORRECT WAY + // ============================================================================ + + staticFS, err := fs.Sub(content, "static") + if err != nil { + return fmt.Errorf("failed to load embedded static directory: %w", err) + } + + mux.Handle( + "/static/", + http.StripPrefix( + "/static/", + http.FileServer(http.FS(staticFS)), + ), + ) + + // ============================================================================ + // ROUTES + // ============================================================================ + + mux.HandleFunc("/", s.handleDashboard) + mux.HandleFunc("/performers", s.handlePerformerList) + mux.HandleFunc("/performers/", s.handlePerformerDetail) + mux.HandleFunc("/studios", s.handleStudioList) + mux.HandleFunc("/studios/", s.handleStudioDetail) + mux.HandleFunc("/scenes", s.handleSceneList) + mux.HandleFunc("/scenes/", s.handleSceneDetail) + mux.HandleFunc("/movies", s.handleMovieList) + mux.HandleFunc("/movies/", s.handleMovieDetail) + + // API + mux.HandleFunc("/api/import/performer", s.handleAPIImportPerformer) + mux.HandleFunc("/api/import/studio", s.handleAPIImportStudio) + mux.HandleFunc("/api/import/scene", s.handleAPIImportScene) + + mux.HandleFunc("/api/import/all", s.handleAPIBulkImportAll) + mux.HandleFunc("/api/import/all-performers", s.handleAPIBulkImportPerformers) + mux.HandleFunc("/api/import/all-studios", s.handleAPIBulkImportStudios) + mux.HandleFunc("/api/import/all-scenes", s.handleAPIBulkImportScenes) + + mux.HandleFunc("/api/import/all-performers/progress", s.handleAPIBulkImportPerformersProgress) + mux.HandleFunc("/api/import/all-studios/progress", s.handleAPIBulkImportStudiosProgress) + mux.HandleFunc("/api/import/all-scenes/progress", s.handleAPIBulkImportScenesProgress) + + mux.HandleFunc("/api/sync", s.handleAPISync) + mux.HandleFunc("/api/sync/status", s.handleAPISyncStatus) + + mux.HandleFunc("/api/search", s.handleAPIGlobalSearch) + + // ============================================================================ + // START SERVER + // ============================================================================ + + fmt.Printf("Starting Goondex Web Server at http://%s\n", s.addr) + return http.ListenAndServe(s.addr, mux) +} + +// ============================================================================ +// PAGE HANDLERS +// ============================================================================ + +func (s *Server) handleDashboard(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/" { + http.NotFound(w, r) + return + } + + performerStore := db.NewPerformerStore(s.db) + studioStore := db.NewStudioStore(s.db) + sceneStore := db.NewSceneStore(s.db) + movieStore := db.NewMovieStore(s.db) + + performers, _ := performerStore.Search("") + studios, _ := studioStore.Search("") + scenes, _ := sceneStore.Search("") + movies, _ := movieStore.Search("") + + data := map[string]interface{}{ + "PerformerCount": len(performers), + "StudioCount": len(studios), + "SceneCount": len(scenes), + "MovieCount": len(movies), + } + + s.templates.ExecuteTemplate(w, "dashboard.html", data) +} + +func (s *Server) handlePerformerList(w http.ResponseWriter, r *http.Request) { + query := r.URL.Query().Get("q") + nationalityFilter := r.URL.Query().Get("nationality") + genderFilter := r.URL.Query().Get("gender") + + store := db.NewPerformerStore(s.db) + performers, err := store.Search(query) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + // Apply filters + var filteredPerformers []interface{} + for _, p := range performers { + // Filter by nationality if specified + if nationalityFilter != "" && nationalityFilter != "all" { + if p.Nationality != nationalityFilter { + continue + } + } + + // Filter by gender if specified + if genderFilter != "" && genderFilter != "all" { + if p.Gender != genderFilter { + continue + } + } + + filteredPerformers = append(filteredPerformers, p) + } + + type PerformerWithCount struct { + Performer interface{} + SceneCount int + Age int + CountryFlag string + } + + var performersWithCounts []PerformerWithCount + for _, p := range filteredPerformers { + performer := p.(model.Performer) + count, _ := store.GetSceneCount(performer.ID) + + // Calculate age from birthday + age := calculateAge(performer.Birthday) + + // Get country flag emoji + countryFlag := getCountryFlag(performer.Nationality) + + performersWithCounts = append(performersWithCounts, + PerformerWithCount{ + Performer: performer, + SceneCount: count, + Age: age, + CountryFlag: countryFlag, + }) + } + + // Get unique nationalities and genders for filter dropdowns + nationalitiesMap := make(map[string]bool) + gendersMap := make(map[string]bool) + for _, p := range performers { + if p.Nationality != "" { + nationalitiesMap[p.Nationality] = true + } + if p.Gender != "" { + gendersMap[p.Gender] = true + } + } + + var nationalities []string + for nat := range nationalitiesMap { + nationalities = append(nationalities, nat) + } + + var genders []string + for gender := range gendersMap { + genders = append(genders, gender) + } + + data := map[string]interface{}{ + "Performers": performersWithCounts, + "Query": query, + "Nationalities": nationalities, + "Genders": genders, + "SelectedNationality": nationalityFilter, + "SelectedGender": genderFilter, + } + + s.templates.ExecuteTemplate(w, "performers.html", data) +} + +// calculateAge calculates age from birthday string (YYYY-MM-DD) +func calculateAge(birthday string) int { + if birthday == "" { + return 0 + } + + birthDate, err := time.Parse("2006-01-02", birthday) + if err != nil { + return 0 + } + + now := time.Now() + age := now.Year() - birthDate.Year() + + // Adjust if birthday hasn't occurred yet this year + if now.Month() < birthDate.Month() || + (now.Month() == birthDate.Month() && now.Day() < birthDate.Day()) { + age-- + } + + return age +} + +// getCountryFlag converts ISO country code to flag emoji +func getCountryFlag(countryCode string) string { + if countryCode == "" { + return "" + } + + // Map of common ISO country codes to flag emojis + // Unicode flags are formed by regional indicator symbols + countryFlags := map[string]string{ + "US": "🇺🇸", "GB": "🇬🇧", "CA": "🇨🇦", "AU": "🇦🇺", + "FR": "🇫🇷", "DE": "🇩🇪", "IT": "🇮🇹", "ES": "🇪🇸", + "RU": "🇷🇺", "JP": "🇯🇵", "CN": "🇨🇳", "BR": "🇧🇷", + "MX": "🇲🇽", "AR": "🇦🇷", "CL": "🇨🇱", "CO": "🇨🇴", + "CZ": "🇨🇿", "HU": "🇭🇺", "PL": "🇵🇱", "RO": "🇷🇴", + "SK": "🇸🇰", "UA": "🇺🇦", "SE": "🇸🇪", "NO": "🇳🇴", + "FI": "🇫🇮", "DK": "🇩🇰", "NL": "🇳🇱", "BE": "🇧🇪", + "CH": "🇨🇭", "AT": "🇦🇹", "PT": "🇵🇹", "GR": "🇬🇷", + "IE": "🇮🇪", "NZ": "🇳🇿", "ZA": "🇿🇦", "IN": "🇮🇳", + "TH": "🇹🇭", "VN": "🇻🇳", "PH": "🇵🇭", "ID": "🇮🇩", + "MY": "🇲🇾", "SG": "🇸🇬", "KR": "🇰🇷", "TW": "🇹🇼", + "HK": "🇭🇰", "TR": "🇹🇷", "IL": "🇮🇱", "EG": "🇪🇬", + } + + if flag, ok := countryFlags[countryCode]; ok { + return flag + } + + // For codes not in our map, try to convert programmatically + // This works for any valid ISO 3166-1 alpha-2 code + if len(countryCode) == 2 { + code := []rune(countryCode) + if code[0] >= 'A' && code[0] <= 'Z' && code[1] >= 'A' && code[1] <= 'Z' { + // Regional indicator symbol base + const regionalBase = 0x1F1E6 - 'A' + return string([]rune{ + rune(regionalBase + code[0]), + rune(regionalBase + code[1]), + }) + } + } + + return "🌍" // Default globe emoji +} + +func (s *Server) handlePerformerDetail(w http.ResponseWriter, r *http.Request) { + idStr := r.URL.Path[len("/performers/"):] + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + http.Error(w, "Invalid performer ID", http.StatusBadRequest) + return + } + + performerStore := db.NewPerformerStore(s.db) + sceneStore := db.NewSceneStore(s.db) + + performer, err := performerStore.GetByID(id) + if err != nil { + http.NotFound(w, r) + return + } + + sceneCount, _ := performerStore.GetSceneCount(id) + scenes, _ := sceneStore.GetByPerformer(id) + + data := map[string]interface{}{ + "Performer": performer, + "SceneCount": sceneCount, + "Scenes": scenes, + } + + s.templates.ExecuteTemplate(w, "performer_detail.html", data) +} + +func (s *Server) handleStudioList(w http.ResponseWriter, r *http.Request) { + query := r.URL.Query().Get("q") + + store := db.NewStudioStore(s.db) + studios, err := store.Search(query) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + type StudioWithCount struct { + Studio interface{} + SceneCount int + } + + var studiosWithCounts []StudioWithCount + for _, st := range studios { + count, _ := store.GetSceneCount(st.ID) + studiosWithCounts = append(studiosWithCounts, + StudioWithCount{Studio: st, SceneCount: count}) + } + + data := map[string]interface{}{ + "Studios": studiosWithCounts, + "Query": query, + } + + s.templates.ExecuteTemplate(w, "studios.html", data) +} + +func (s *Server) handleStudioDetail(w http.ResponseWriter, r *http.Request) { + idStr := r.URL.Path[len("/studios/"):] + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + http.Error(w, "Invalid studio ID", http.StatusBadRequest) + return + } + + store := db.NewStudioStore(s.db) + studio, err := store.GetByID(id) + if err != nil { + http.NotFound(w, r) + return + } + + sceneCount, _ := store.GetSceneCount(id) + + data := map[string]interface{}{ + "Studio": studio, + "SceneCount": sceneCount, + } + + s.templates.ExecuteTemplate(w, "studio_detail.html", data) +} + +func (s *Server) handleSceneList(w http.ResponseWriter, r *http.Request) { + query := r.URL.Query().Get("q") + + sceneStore := db.NewSceneStore(s.db) + studioStore := db.NewStudioStore(s.db) + + scenes, err := sceneStore.Search(query) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + type SceneWithStudio struct { + Scene interface{} + StudioName string + } + + var scenesWithStudios []SceneWithStudio + for _, sc := range scenes { + studioName := "" + + if sc.StudioID != nil && *sc.StudioID > 0 { + if studio, err := studioStore.GetByID(*sc.StudioID); err == nil { + studioName = studio.Name + } + } + + scenesWithStudios = append(scenesWithStudios, + SceneWithStudio{Scene: sc, StudioName: studioName}) + } + + data := map[string]interface{}{ + "Scenes": scenesWithStudios, + "Query": query, + } + + s.templates.ExecuteTemplate(w, "scenes.html", data) +} + +func (s *Server) handleSceneDetail(w http.ResponseWriter, r *http.Request) { + idStr := r.URL.Path[len("/scenes/"):] + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + http.Error(w, "Invalid scene ID", http.StatusBadRequest) + return + } + + sceneStore := db.NewSceneStore(s.db) + studioStore := db.NewStudioStore(s.db) + + scene, err := sceneStore.GetByID(id) + if err != nil { + http.NotFound(w, r) + return + } + + performers, _ := sceneStore.GetPerformers(id) + tags, _ := sceneStore.GetTags(id) + movies, _ := sceneStore.GetMovies(id) + + studioName := "" + if scene.StudioID != nil && *scene.StudioID > 0 { + if studio, err := studioStore.GetByID(*scene.StudioID); err == nil { + studioName = studio.Name + } + } + + data := map[string]interface{}{ + "Scene": scene, + "Performers": performers, + "Tags": tags, + "Movies": movies, + "StudioName": studioName, + } + + s.templates.ExecuteTemplate(w, "scene_detail.html", data) +} + +func (s *Server) handleMovieList(w http.ResponseWriter, r *http.Request) { + query := r.URL.Query().Get("q") + + movieStore := db.NewMovieStore(s.db) + studioStore := db.NewStudioStore(s.db) + + movies, err := movieStore.Search(query) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + type MovieWithDetails struct { + Movie interface{} + StudioName string + SceneCount int + } + + var moviesWithDetails []MovieWithDetails + for _, m := range movies { + studioName := "" + if m.StudioID != nil && *m.StudioID > 0 { + if studio, err := studioStore.GetByID(*m.StudioID); err == nil { + studioName = studio.Name + } + } + + sceneCount, _ := movieStore.GetSceneCount(m.ID) + + moviesWithDetails = append(moviesWithDetails, + MovieWithDetails{Movie: m, StudioName: studioName, SceneCount: sceneCount}) + } + + data := map[string]interface{}{ + "Movies": moviesWithDetails, + "Query": query, + } + + s.templates.ExecuteTemplate(w, "movies.html", data) +} + +func (s *Server) handleMovieDetail(w http.ResponseWriter, r *http.Request) { + idStr := r.URL.Path[len("/movies/"):] + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + http.Error(w, "Invalid movie ID", http.StatusBadRequest) + return + } + + movieStore := db.NewMovieStore(s.db) + studioStore := db.NewStudioStore(s.db) + + movie, err := movieStore.GetByID(id) + if err != nil { + http.NotFound(w, r) + return + } + + scenes, _ := movieStore.GetScenes(id) + + studioName := "" + if movie.StudioID != nil && *movie.StudioID > 0 { + if studio, err := studioStore.GetByID(*movie.StudioID); err == nil { + studioName = studio.Name + } + } + + data := map[string]interface{}{ + "Movie": movie, + "Scenes": scenes, + "StudioName": studioName, + } + + s.templates.ExecuteTemplate(w, "movie_detail.html", data) +} + +// ============================================================================ +// API HANDLERS (NO LOGIC CHANGES — ONLY STATIC FIXES ABOVE) +// ============================================================================ + +type APIResponse struct { + Success bool `json:"success"` + Message string `json:"message"` + Data interface{} `json:"data,omitempty"` +} + +func (s *Server) handleAPIImportPerformer(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + var req struct { + Query string `json:"query"` + } + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "Invalid request"}) + return + } + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "TPDB_API_KEY not configured"}) + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + performers, err := scraper.SearchPerformers(context.Background(), req.Query) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Search failed: %v", err)}) + return + } + + if len(performers) == 0 { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "No performers found"}) + return + } + + store := db.NewPerformerStore(s.db) + imported := 0 + + for _, p := range performers { + fullPerformer, err := scraper.GetPerformerByID(context.Background(), p.SourceID) + if err != nil { + fullPerformer = &p + } + + if err := store.Create(fullPerformer); err != nil { + continue + } + imported++ + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported %d performer(s)", imported), + Data: map[string]int{"imported": imported, "found": len(performers)}, + }) +} + +func (s *Server) handleAPIImportStudio(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + var req struct { + Query string `json:"query"` + } + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "Invalid request"}) + return + } + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "TPDB_API_KEY not configured"}) + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + studios, err := scraper.SearchStudios(context.Background(), req.Query) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Search failed: %v", err)}) + return + } + + if len(studios) == 0 { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "No studios found"}) + return + } + + store := db.NewStudioStore(s.db) + imported := 0 + + for _, st := range studios { + if err := store.Create(&st); err != nil { + continue + } + imported++ + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported %d studio(s)", imported), + Data: map[string]int{"imported": imported, "found": len(studios)}, + }) +} + +func (s *Server) handleAPIImportScene(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + var req struct { + Query string `json:"query"` + } + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "Invalid request"}) + return + } + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "TPDB_API_KEY not configured"}) + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + scenes, err := scraper.SearchScenes(context.Background(), req.Query) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Search failed: %v", err)}) + return + } + + if len(scenes) == 0 { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "No scenes found"}) + return + } + + sceneStore := db.NewSceneStore(s.db) + performerStore := db.NewPerformerStore(s.db) + studioStore := db.NewStudioStore(s.db) + tagStore := db.NewTagStore(s.db) + + imported := 0 + + for _, sc := range scenes { + + if sc.Studio != nil { + if err := studioStore.Create(sc.Studio); err != nil { + studios, _ := studioStore.Search(sc.Studio.Name) + if len(studios) > 0 { + sc.StudioID = &studios[0].ID + } + } else { + sc.StudioID = &sc.Studio.ID + } + } + + if err := sceneStore.Create(&sc); err != nil { + continue + } + + for _, p := range sc.Performers { + if err := performerStore.Create(&p); err != nil { + performers, _ := performerStore.Search(p.Name) + if len(performers) > 0 { + p.ID = performers[0].ID + } + } + if p.ID > 0 { + sceneStore.AddPerformer(sc.ID, p.ID) + } + } + + for _, t := range sc.Tags { + existing, _ := tagStore.GetByName(t.Name) + if existing != nil { + t.ID = existing.ID + } else { + if err := tagStore.Create(&t); err != nil { + continue + } + } + if t.ID > 0 { + sceneStore.AddTag(sc.ID, t.ID) + } + } + + imported++ + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported %d scene(s)", imported), + Data: map[string]int{"imported": imported, "found": len(scenes)}, + }) +} + +func (s *Server) handleAPISync(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + var req struct { + Force bool `json:"force"` + } + json.NewDecoder(r.Body).Decode(&req) + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "TPDB_API_KEY not configured"}) + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := sync.NewService(s.db, scraper) + + opts := sync.SyncOptions{ + Force: req.Force, + MinInterval: 24 * time.Hour, + } + + results, err := service.SyncAll(context.Background(), opts) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Sync failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: "Sync completed", + Data: results, + }) +} + +func (s *Server) handleAPISyncStatus(w http.ResponseWriter, r *http.Request) { + scraper := tpdb.NewScraper("https://api.theporndb.net", "") + service := sync.NewService(s.db, scraper) + + statuses, err := service.GetSyncStatus() + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Failed to get status: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: "Status retrieved", + Data: statuses, + }) +} + +// ============================================================================ +// BULK IMPORT ENDPOINTS + SSE PROGRESS +// ============================================================================ + +func (s *Server) handleAPIBulkImportAll(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + w.Header().Set("Content-Type", "application/json") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "TPDB_API_KEY not configured"}) + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := import_service.NewService(s.db, scraper) + + results, err := service.BulkImportAll(context.Background()) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Bulk import failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: "Bulk import completed successfully", + Data: results, + }) +} + +func (s *Server) handleAPIBulkImportPerformers(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + w.Header().Set("Content-Type", "application/json") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "TPDB_API_KEY not configured"}) + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := import_service.NewService(s.db, scraper) + + result, err := service.BulkImportAllPerformers(context.Background()) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Import failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported %d/%d performers", result.Imported, result.Total), + Data: result, + }) +} + +func (s *Server) handleAPIBulkImportStudios(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + w.Header().Set("Content-Type", "application/json") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "TPDB_API_KEY not configured"}) + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := import_service.NewService(s.db, scraper) + + result, err := service.BulkImportAllStudios(context.Background()) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Import failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported %d/%d studios", result.Imported, result.Total), + Data: result, + }) +} + +func (s *Server) handleAPIBulkImportScenes(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + w.Header().Set("Content-Type", "application/json") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: "TPDB_API_KEY not configured"}) + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := import_service.NewService(s.db, scraper) + + result, err := service.BulkImportAllScenes(context.Background()) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Import failed: %v", err)}) + return + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Imported %d/%d scenes", result.Imported, result.Total), + Data: result, + }) +} + +func (s *Server) handleAPIBulkImportPerformersProgress(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/event-stream") + w.Header().Set("Cache-Control", "no-cache") + w.Header().Set("Connection", "keep-alive") + w.Header().Set("Access-Control-Allow-Origin", "*") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + fmt.Fprintf(w, "data: {\"error\": \"TPDB_API_KEY not configured\"}\n\n") + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := import_service.NewService(s.db, scraper) + + flusher, ok := w.(http.Flusher) + if !ok { + http.Error(w, "Streaming unsupported", http.StatusInternalServerError) + return + } + + progressCallback := func(update import_service.ProgressUpdate) { + data, _ := json.Marshal(update) + fmt.Fprintf(w, "data: %s\n\n", data) + flusher.Flush() + } + + result, err := service.BulkImportAllPerformersWithProgress( + context.Background(), progressCallback) + + if err != nil { + fmt.Fprintf(w, "data: {\"error\": \"%s\"}\n\n", err.Error()) + } else { + data, _ := json.Marshal(map[string]interface{}{ + "complete": true, + "result": result, + }) + fmt.Fprintf(w, "data: %s\n\n", data) + } + + flusher.Flush() +} + +func (s *Server) handleAPIBulkImportStudiosProgress(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/event-stream") + w.Header().Set("Cache-Control", "no-cache") + w.Header().Set("Connection", "keep-alive") + w.Header().Set("Access-Control-Allow-Origin", "*") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + fmt.Fprintf(w, "data: {\"error\": \"TPDB_API_KEY not configured\"}\n\n") + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := import_service.NewService(s.db, scraper) + + flusher, ok := w.(http.Flusher) + if !ok { + http.Error(w, "Streaming unsupported", http.StatusInternalServerError) + return + } + + progressCallback := func(update import_service.ProgressUpdate) { + data, _ := json.Marshal(update) + fmt.Fprintf(w, "data: %s\n\n", data) + flusher.Flush() + } + + result, err := service.BulkImportAllStudiosWithProgress( + context.Background(), progressCallback) + + if err != nil { + fmt.Fprintf(w, "data: {\"error\": \"%s\"}\n\n", err.Error()) + } else { + data, _ := json.Marshal(map[string]interface{}{ + "complete": true, + "result": result, + }) + fmt.Fprintf(w, "data: %s\n\n", data) + } + + flusher.Flush() +} + +func (s *Server) handleAPIBulkImportScenesProgress(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/event-stream") + w.Header().Set("Cache-Control", "no-cache") + w.Header().Set("Connection", "keep-alive") + w.Header().Set("Access-Control-Allow-Origin", "*") + + apiKey := os.Getenv("TPDB_API_KEY") + if apiKey == "" { + fmt.Fprintf(w, "data: {\"error\": \"TPDB_API_KEY not configured\"}\n\n") + return + } + + scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := import_service.NewService(s.db, scraper) + + flusher, ok := w.(http.Flusher) + if !ok { + http.Error(w, "Streaming unsupported", http.StatusInternalServerError) + return + } + + progressCallback := func(update import_service.ProgressUpdate) { + data, _ := json.Marshal(update) + fmt.Fprintf(w, "data: %s\n\n", data) + flusher.Flush() + } + + result, err := service.BulkImportAllScenesWithProgress( + context.Background(), progressCallback) + + if err != nil { + fmt.Fprintf(w, "data: {\"error\": \"%s\"}\n\n", err.Error()) + } else { + data, _ := json.Marshal(map[string]interface{}{ + "complete": true, + "result": result, + }) + fmt.Fprintf(w, "data: %s\n\n", data) + } + + flusher.Flush() +} + +// ============================================================================ +// GLOBAL SEARCH +// ============================================================================ + +func (s *Server) handleAPIGlobalSearch(w http.ResponseWriter, r *http.Request) { + query := r.URL.Query().Get("q") + if query == "" { + json.NewEncoder(w).Encode(APIResponse{ + Success: false, + Message: "Query parameter required", + }) + return + } + + performerStore := db.NewPerformerStore(s.db) + studioStore := db.NewStudioStore(s.db) + sceneStore := db.NewSceneStore(s.db) + tagStore := db.NewTagStore(s.db) + + performers, _ := performerStore.Search(query) + studios, _ := studioStore.Search(query) + scenes, _ := sceneStore.Search(query) + tags, _ := tagStore.Search(query) + + results := map[string]interface{}{ + "performers": performers, + "studios": studios, + "scenes": scenes, + "tags": tags, + "total": len(performers) + len(studios) + len(scenes) + len(tags), + } + + json.NewEncoder(w).Encode(APIResponse{ + Success: true, + Message: fmt.Sprintf("Found %d results", results["total"]), + Data: results, + }) +} diff --git a/internal/web/static/css/buttons.css b/internal/web/static/css/buttons.css new file mode 100644 index 0000000..d4a164c --- /dev/null +++ b/internal/web/static/css/buttons.css @@ -0,0 +1,177 @@ +/* + * GOONDEX — BUTTONS + * Modern neon-subtle buttons using Flamingo Pink brand theme. + * Compatible with GX_Button + theme variables. + */ + +/* ================================ + * BASE BUTTON STYLE + * ================================ */ +.btn { + display: inline-flex; + align-items: center; + justify-content: center; + + padding: 0.85rem 1.8rem; + border-radius: var(--radius); + + font-size: 1rem; + font-weight: 600; + cursor: pointer; + + color: var(--color-text-primary); + background: var(--color-bg-elevated); + + border: 1px solid var(--color-border-soft); + + transition: background var(--transition), + border-color var(--transition), + box-shadow var(--transition), + transform var(--transition-fast); +} + +/* Hover glow (SUBTLE, medium intensity) */ +.btn:hover { + background: var(--color-bg-card); + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); + transform: translateY(-2px); +} + +/* Active press */ +.btn:active { + transform: translateY(0); + box-shadow: none; +} + +/* Disabled */ +.btn:disabled { + opacity: 0.45; + cursor: not-allowed; + box-shadow: none; +} + + +/* ================================ + * PRIMARY BUTTON + * ================================ */ +.btn-primary, +.btn.brand, +.btn.pink { + background: linear-gradient( + 135deg, + var(--color-brand) 0%, + var(--color-brand-hover) 90% + ); + border: none; + color: #fff; + text-shadow: 0 0 8px rgba(255, 255, 255, 0.25); +} + +.btn-primary:hover, +.btn.brand:hover, +.btn.pink:hover { + box-shadow: var(--shadow-glow-pink); + transform: translateY(-2px); +} + + +/* ================================ + * SECONDARY BUTTON + * ================================ */ +.btn-secondary { + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + color: var(--color-text-primary); +} + +.btn-secondary:hover { + border-color: var(--color-brand); + color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + + +/* ================================ + * SMALL BUTTONS (STAT CARDS) + * ================================ */ +.btn-small { + padding: 0.55rem 1.1rem; + font-size: 0.85rem; + border-radius: calc(var(--radius) - 4px); +} + +.btn-small:hover { + transform: translateY(-1px); +} + + +/* ================================ + * FULL-WIDTH BUTTONS + * ================================ */ +.btn-block { + display: flex; + width: 100%; +} + + +/* ================================ + * GHOST BUTTON + * (transparent, subtle neon edges) + * ================================ */ +.btn-ghost { + background: transparent; + border: 1px solid var(--color-border-soft); + color: var(--color-text-secondary); +} + +.btn-ghost:hover { + border-color: var(--color-brand); + color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + + +/* ================================ + * DANGER BUTTON (warning orange) + * ================================ */ +.btn-danger { + background: var(--color-warning); + color: #000; + border: none; +} + +.btn-danger:hover { + background: #ffc7a8; +} + + +/* ================================ + * BUTTON HOVER EFFECT (GX-style) + * ================================ */ +.btn .hoverEffect, +.btn-secondary .hoverEffect, +.btn-small .hoverEffect { + position: relative; + pointer-events: none; +} + +.btn .hoverEffect div, +.btn-secondary .hoverEffect div, +.btn-small .hoverEffect div { + position: absolute; + inset: 0; + border-radius: inherit; + opacity: 0; + background: radial-gradient(circle, + var(--color-brand-glow) 0%, + transparent 80% + ); + transition: opacity 0.35s ease; +} + +.btn:hover .hoverEffect div, +.btn-secondary:hover .hoverEffect div, +.btn-small:hover .hoverEffect div { + opacity: 1; +} diff --git a/internal/web/static/css/components.css b/internal/web/static/css/components.css new file mode 100644 index 0000000..1dfb068 --- /dev/null +++ b/internal/web/static/css/components.css @@ -0,0 +1,230 @@ +/* + * GOONDEX — COMPONENTS + * Cards, stats, modals, chips, search, utility components. + * Unified with dark-only theme + medium-intensity Flamingo Pink accents. + */ + +/* ============================================ + * CARD — Base component + * ============================================ */ +.card { + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius); + padding: 1.5rem; + box-shadow: var(--shadow-elevated); + transition: background var(--transition), box-shadow var(--transition); +} + +.card:hover { + background: var(--color-bg-elevated); + box-shadow: var(--shadow-glow-pink-soft); +} + +/* ============================================ + * STAT CARDS (Dashboard) + * ============================================ */ +.stat-card { + background: var(--color-bg-card); + border-radius: var(--radius); + padding: 1.5rem; + display: flex; + align-items: center; + gap: 1.2rem; + + border: 1px solid var(--color-border-soft); + box-shadow: var(--shadow-elevated); + transition: transform var(--transition), box-shadow var(--transition); +} + +.stat-card:hover { + transform: translateY(-2px); + box-shadow: var(--shadow-glow-pink); +} + +.stat-icon { + font-size: 2.2rem; + color: var(--color-brand); + text-shadow: 0 0 10px var(--color-brand-glow); +} + +.stat-content { + flex: 1; +} + +.stat-value { + font-size: 2.2rem; + font-weight: 700; + color: var(--color-text-primary); +} + +.stat-label { + font-size: 0.95rem; + color: var(--color-text-secondary); +} + +.stat-actions { + display: flex; + flex-direction: column; + gap: 0.35rem; + text-align: right; +} + +.stat-link { + color: var(--color-brand); + font-size: 0.9rem; + text-decoration: none; +} + +.stat-link:hover { + text-decoration: underline; +} + +/* ============================================ + * SEARCH RESULTS DROPDOWN + * ============================================ */ +.search-results { + margin-top: 0.75rem; + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius); + box-shadow: var(--shadow-elevated); + max-height: 340px; + overflow-y: auto; + padding: 0.5rem; +} + +.search-result-item { + padding: 0.75rem 1rem; + border-radius: var(--radius); + cursor: pointer; + transition: background var(--transition); +} + +.search-result-item:hover { + background: rgba(255, 79, 163, 0.08); +} + +.search-result-title { + font-size: 1rem; + font-weight: 600; + color: var(--color-text-primary); +} + + +/* ============================================ + * TAG / CHIP COMPONENTS + * ============================================ */ +.tag, +.chip { + display: inline-block; + padding: 0.35rem 0.7rem; + font-size: 0.8rem; + border-radius: var(--radius); + background: rgba(255, 79, 163, 0.15); + border: 1px solid rgba(255, 79, 163, 0.28); + color: var(--color-brand); + margin-right: 0.4rem; + margin-bottom: 0.4rem; + text-transform: capitalize; + white-space: nowrap; +} + + + +/* ============================================ + * MODALS (Overlays, boxes, close buttons) + * ============================================ */ +.modal-overlay { + position: fixed; + inset: 0; + background: rgba(0, 0, 0, 0.78); + backdrop-filter: blur(4px); + z-index: 1000; + + opacity: 0; + pointer-events: none; + transition: opacity var(--transition); +} + +.modal-overlay.active { + opacity: 1; + pointer-events: auto; +} + +.modal-box { + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius-soft); + padding: 2rem; + max-width: 520px; + + margin: 12vh auto 0 auto; + box-shadow: var(--shadow-elevated), var(--shadow-glow-pink-soft); + animation: modalFadeIn 0.25s ease; +} + +@keyframes modalFadeIn { + from { opacity: 0; transform: translateY(-12px); } + to { opacity: 1; transform: translateY(0); } +} + +.modal-close { + position: absolute; + top: 1.1rem; + right: 1.4rem; + font-size: 1.4rem; + color: var(--color-text-secondary); + cursor: pointer; + transition: color var(--transition); +} + +.modal-close:hover { + color: var(--color-brand); +} + +/* Modal title */ +.modal-title { + font-size: 1.6rem; + font-weight: 700; + margin-bottom: 1rem; + + background: linear-gradient(135deg, var(--color-brand), var(--color-header)); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +/* ============================================ + * GRID + UTIL COMPONENTS + * ============================================ */ +.grid-2 { + display: grid; + grid-template-columns: repeat(2, 1fr); + gap: 1.5rem; +} + +.grid-3 { + display: grid; + grid-template-columns: repeat(3, 1fr); + gap: 1.5rem; +} + +.grid-auto { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(260px, 1fr)); + gap: 1.5rem; +} + +/* Glow divider */ +.divider-glow { + height: 1px; + width: 100%; + margin: 1.5rem 0; + background: linear-gradient( + 90deg, + transparent, + rgba(255, 79, 163, 0.25), + transparent + ); +} + diff --git a/internal/web/static/css/forms.css b/internal/web/static/css/forms.css new file mode 100644 index 0000000..4ce130c --- /dev/null +++ b/internal/web/static/css/forms.css @@ -0,0 +1,171 @@ +/* + * GOONDEX — FORMS + * Inputs, textareas, selects, labels. + * Neon-subtle Flamingo Pink accent + dark UI. + */ + +/* ================================ + * FORM LABELS + * ================================ */ +label { + display: block; + margin-bottom: 0.35rem; + font-size: 0.9rem; + font-weight: 600; + color: var(--color-text-primary); +} + + + +/* ================================ + * INPUT BASE STYLE + * (Text, search, email, password, number) + * ================================ */ +input[type="text"], +input[type="search"], +input[type="email"], +input[type="password"], +input[type="number"], +textarea, +select { + width: 100%; + padding: 0.9rem 1rem; + + background: var(--color-bg-card); + color: var(--color-text-primary); + + border: 1px solid var(--color-border-soft); + border-radius: var(--radius); + + font-size: 1rem; + outline: none; + + transition: border-color var(--transition), + box-shadow var(--transition), + background var(--transition); +} + +/* Hover */ +input:hover, +textarea:hover, +select:hover { + border-color: var(--color-brand); +} + +/* Focus (medium neon glow) */ +input:focus, +textarea:focus, +select:focus { + border-color: var(--color-brand); + box-shadow: 0 0 0 3px rgba(255, 79, 163, 0.18), + var(--shadow-glow-pink-soft); + background: var(--color-bg-elevated); +} + + + +/* ================================ + * TEXTAREA + * ================================ */ +textarea { + min-height: 140px; + resize: vertical; +} + + + +/* ================================ + * SELECT DROPDOWN + * ================================ */ +select { + appearance: none; + background-image: url("data:image/svg+xml;utf8,"); + background-repeat: no-repeat; + background-position: right 1rem center; + background-size: 14px; +} + + + +/* ================================ + * CHECKBOXES (standard form) + * NOTE: GX_Checkbox.css overrides these for custom components. + * ================================ */ +input[type="checkbox"] { + width: 18px; + height: 18px; + border-radius: 4px; + + border: 1px solid var(--color-border-soft); + background: var(--color-bg-card); + + cursor: pointer; + position: relative; +} + +input[type="checkbox"]:checked { + background: var(--color-brand); + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + + + +/* ================================ + * FORM GROUP SPACING + * ================================ */ +.form-group { + margin-bottom: 1.4rem; +} + + + +/* ================================ + * PLACEHOLDER TEXT + * ================================ */ +::placeholder { + color: var(--color-text-secondary); + opacity: 0.6; +} + + + +/* ================================ + * SEARCH BAR GLOBAL STYLE + * Matches Dashboard "Global Search" + * ================================ */ +input.global-search, +#global-search.input { +padding: 1rem 1.2rem; +font-size: 1.05rem; + +border-radius: var(--radius-soft); +background: var(--color-bg-elevated); +border: 1px solid var(--color-border-soft); +} + +input.global-search:hover, +#global-search.input:hover { +border-color: var(--color-brand); +} + +input.global-search:focus, +#global-search.input:focus { +border-color: var(--color-brand); +background: var(--color-bg-card); +box-shadow: var(--shadow-glow-pink-soft); +} + + + +/* ================================ + * ERROR / WARNING STATES + * ================================ */ +.input-error { + border-color: var(--color-warning); + background: rgba(255, 170, 136, 0.05); +} + +.input-error:focus { + box-shadow: 0 0 0 3px rgba(255, 170, 136, 0.25); +} diff --git a/internal/web/static/css/goondex.css b/internal/web/static/css/goondex.css new file mode 100644 index 0000000..9b2a5e1 --- /dev/null +++ b/internal/web/static/css/goondex.css @@ -0,0 +1,32 @@ +/* ============================================================================ + * Goondex Master Stylesheet + * Dark-Only • Neon Flamingo Pink Accents (Medium Intensity) + * ============================================================================ */ + +/* ===== GX COMPONENT LIBRARY ================================================= */ +@import 'gx/GX_Button.css'; +@import 'gx/GX_CardGrid.css'; +@import 'gx/GX_Checkbox.css'; +@import 'gx/GX_Input.css'; +@import 'gx/GX_Loader.css'; + +/* ===== BASE THEME & VARIABLES =============================================== */ +@import 'theme.css'; + +/* ===== LAYOUT & STRUCTURE =================================================== */ +@import 'layout.css'; +@import 'navbar.css'; +@import 'sidepanels.css'; + +/* ===== PAGE-LEVEL COMPONENTS ================================================ */ +@import 'hero.css'; +@import 'stats.css'; +@import 'forms.css'; +@import 'buttons.css'; +@import 'components.css'; + +/* ===== GLOBAL PAGE STYLES =================================================== */ +@import 'pages.css'; + +/* ===== RESPONSIVE OVERRIDES (MOBILE/TABLET/HALF-SCREEN) ===================== */ +@import 'responsive.css'; diff --git a/internal/web/static/css/gx/GX_Button.css b/internal/web/static/css/gx/GX_Button.css new file mode 100644 index 0000000..3556496 --- /dev/null +++ b/internal/web/static/css/gx/GX_Button.css @@ -0,0 +1,118 @@ +/* + * GX BUTTON — Premium Minimal Neon + * For: Goondex Dark-Only Theme + * Accents: Flamingo Pink (C1), Medium Glow + */ + +.gx-btn { + display: inline-flex; + align-items: center; + justify-content: center; + + padding: 0.75rem 1.6rem; + border-radius: var(--radius); + + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + + color: var(--color-text-primary); + font-size: 1rem; + font-weight: 600; + letter-spacing: 0.3px; + + cursor: pointer; + text-decoration: none; + user-select: none; + + transition: + background var(--transition), + border-color var(--transition), + color var(--transition), + transform var(--transition-fast), + box-shadow var(--transition); + + box-shadow: 0 0 0 rgba(0,0,0,0); /* no glow at rest */ +} + +/* Hover — subtle neon lift */ +.gx-btn:hover { + border-color: var(--color-brand); + background: rgba(255, 79, 163, 0.09); /* light pink wash */ + box-shadow: var(--shadow-glow-pink-soft); + transform: translateY(-2px); +} + +/* Active press */ +.gx-btn:active { + transform: translateY(0); + box-shadow: inset 0 0 14px rgba(255, 79, 163, 0.25); +} + +/* Disabled */ +.gx-btn.disabled, +.gx-btn:disabled { + opacity: 0.4; + cursor: not-allowed; + pointer-events: none; +} + +/* ===== VARIANTS ===== */ + +/* Primary — Flamingo Pink accented */ +.gx-btn-primary { + border-color: var(--color-brand); + background: linear-gradient( + 135deg, + rgba(255, 79, 163, 0.14), + rgba(255, 79, 163, 0.06) + ); + color: var(--color-brand); +} + +.gx-btn-primary:hover { + background: linear-gradient( + 135deg, + rgba(255, 79, 163, 0.2), + rgba(255, 79, 163, 0.1) + ); + box-shadow: var(--shadow-glow-pink); + color: var(--color-brand-hover); +} + +/* Secondary — clean monochrome button */ +.gx-btn-secondary { + background: var(--color-bg-elevated); + border-color: var(--color-border); + color: var(--color-text-secondary); +} + +.gx-btn-secondary:hover { + background: var(--color-bg-card); + border-color: var(--color-border-soft); + color: var(--color-text-primary); + box-shadow: var(--shadow-glow-pink-soft); +} + +/* Danger — warnings, deletions */ +.gx-btn-danger { + border-color: var(--color-warning); + color: var(--color-warning); +} + +.gx-btn-danger:hover { + background: rgba(255, 170, 136, 0.1); + box-shadow: 0 0 18px rgba(255, 170, 136, 0.22); +} + +/* Fit small buttons in stat cards, modals, etc */ +.gx-btn-small { + padding: 0.45rem 1rem; + font-size: 0.85rem; + border-radius: var(--radius); +} + +/* Full-width (mobile-friendly) */ +.gx-btn-block { + width: 100%; + display: flex; +} diff --git a/internal/web/static/css/gx/GX_Button.html b/internal/web/static/css/gx/GX_Button.html new file mode 100644 index 0000000..5c01c6b --- /dev/null +++ b/internal/web/static/css/gx/GX_Button.html @@ -0,0 +1,16 @@ +
+ + + + + +

+ + + + +

+ + + +
diff --git a/internal/web/static/css/gx/GX_CardGrid.css b/internal/web/static/css/gx/GX_CardGrid.css new file mode 100644 index 0000000..b62155b --- /dev/null +++ b/internal/web/static/css/gx/GX_CardGrid.css @@ -0,0 +1,103 @@ +/* + * GX CARD GRID — Performer / Studio / Scene cards + * Dark luxury aesthetic, Flamingo Pink medium glow, responsive columns + */ + +/* WRAPPER */ +.gx-card-grid { + display: grid; + gap: 1.6rem; + padding: 1rem 0; + grid-template-columns: repeat(auto-fill, minmax(250px, 1fr)); +} + +/* CARD */ +.gx-card { + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius-soft); + overflow: hidden; + + box-shadow: var(--shadow-elevated); + transition: transform var(--transition), + box-shadow var(--transition), + border-color var(--transition); + + cursor: pointer; + position: relative; +} + +/* HOVER EFFECT */ +.gx-card:hover { + transform: translateY(-4px); + border-color: var(--color-brand); + box-shadow: 0 0 18px rgba(255, 79, 163, 0.28), + 0 6px 24px rgba(0, 0, 0, 0.55); +} + +/* THUMBNAIL */ +.gx-card-thumb { + width: 100%; + aspect-ratio: 3 / 4; + background-size: cover; + background-position: center; + filter: brightness(0.92); + transition: filter var(--transition-fast); +} + +.gx-card:hover .gx-card-thumb { + filter: brightness(1); +} + +/* CONTENT */ +.gx-card-body { + padding: 1rem; +} + +/* TITLE */ +.gx-card-title { + font-size: 1.1rem; + font-weight: 600; + margin-bottom: 0.35rem; + + background: linear-gradient(135deg, var(--color-text-primary), var(--color-header)); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +/* SMALL META (scene count, category, etc.) */ +.gx-card-meta { + font-size: 0.85rem; + color: var(--color-text-secondary); + opacity: 0.9; +} + +/* TAGS inside cards (optional) */ +.gx-card-tags { + margin-top: 0.8rem; + display: flex; + flex-wrap: wrap; + gap: 0.4rem; +} + +.gx-card-tag { + padding: 0.2rem 0.55rem; + font-size: 0.75rem; + border-radius: var(--radius); + background: rgba(255, 79, 163, 0.08); + color: var(--color-brand); + border: 1px solid rgba(255, 79, 163, 0.25); + text-transform: uppercase; + letter-spacing: 0.03em; +} + +/* MOBILE OPTIMISATION */ +@media (max-width: 550px) { + .gx-card-grid { + grid-template-columns: repeat(auto-fill, minmax(160px, 1fr)); + } + + .gx-card-title { + font-size: 1rem; + } +} diff --git a/internal/web/static/css/gx/GX_CardGrid.html b/internal/web/static/css/gx/GX_CardGrid.html new file mode 100644 index 0000000..de8ce1a --- /dev/null +++ b/internal/web/static/css/gx/GX_CardGrid.html @@ -0,0 +1,23 @@ +
+ + {{range .Performers}} +
+ +
+
+ +
+
{{.Performer.Name}}
+
{{.SceneCount}} scenes
+ +
+ {{range .Performer.Tags}} + {{.Name}} + {{end}} +
+
+
+ {{end}} + +
diff --git a/internal/web/static/css/gx/GX_Checkbox.css b/internal/web/static/css/gx/GX_Checkbox.css new file mode 100644 index 0000000..4742cc0 --- /dev/null +++ b/internal/web/static/css/gx/GX_Checkbox.css @@ -0,0 +1,88 @@ +/* + * GX CHECKBOX — Premium Minimal Neon + * Dark-only, Flamingo Pink subtle glow + */ + +.gx-checkbox { + display: flex; + align-items: center; + gap: 0.6rem; + cursor: pointer; + user-select: none; + + font-size: 1rem; + color: var(--color-text-primary); + + transition: color var(--transition-fast); +} + +/* Hide native checkbox */ +.gx-checkbox input { + position: absolute; + opacity: 0; + pointer-events: none; +} + +/* Custom box */ +.gx-checkbox-box { + width: 20px; + height: 20px; + border-radius: 6px; + + background: var(--color-bg-card); + border: 2px solid var(--color-border-soft); + + display: flex; + align-items: center; + justify-content: center; + + transition: + border-color var(--transition), + background var(--transition), + box-shadow var(--transition-fast), + transform var(--transition-fast); +} + +/* Checkmark icon */ +.gx-checkbox-check { + width: 12px; + height: 12px; + opacity: 0; + transform: scale(0.5); + transition: opacity var(--transition), transform var(--transition-fast); + + background: var(--color-brand); + clip-path: polygon( + 14% 44%, 0 58%, 40% 100%, + 100% 6%, 84% -6%, 38% 72% + ); +} + +/* Hover — soft neon border */ +.gx-checkbox:hover .gx-checkbox-box { + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + +/* Checked state */ +.gx-checkbox input:checked + .gx-checkbox-box { + background: rgba(255, 79, 163, 0.12); + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink); + + transform: translateY(-1px); +} + +/* Reveal checkmark */ +.gx-checkbox input:checked + .gx-checkbox-box .gx-checkbox-check { + opacity: 1; + transform: scale(1); +} + +/* Disabled */ +.gx-checkbox.disabled, +.gx-checkbox input:disabled { + opacity: 0.4; + cursor: not-allowed; + pointer-events: none; +} diff --git a/internal/web/static/css/gx/GX_Checkbox.html b/internal/web/static/css/gx/GX_Checkbox.html new file mode 100644 index 0000000..e54447c --- /dev/null +++ b/internal/web/static/css/gx/GX_Checkbox.html @@ -0,0 +1,31 @@ +
+ + + +

+ + + +

+ + + +
diff --git a/internal/web/static/css/gx/GX_ContextMenu.css b/internal/web/static/css/gx/GX_ContextMenu.css new file mode 100644 index 0000000..0b8d77e --- /dev/null +++ b/internal/web/static/css/gx/GX_ContextMenu.css @@ -0,0 +1,79 @@ +/* + * GX CONTEXT MENU + * Premium dark context menu with Flamingo Pink accent. + */ + +.gx-contextmenu { + position: fixed; + top: 0; + left: 0; + width: 220px; + background: var(--color-bg-card); + border-radius: var(--radius); + border: 1px solid rgba(255, 79, 163, 0.20); + + padding: 6px 0; + list-style: none; + + opacity: 0; + scale: 0.92; + pointer-events: none; + + z-index: 99999; + + /* subtle multidirectional glow */ + box-shadow: + 0 12px 32px rgba(0, 0, 0, 0.65), + 0 0 22px rgba(255, 79, 163, 0.14); + + backdrop-filter: blur(4px); + + transition: + opacity 0.18s ease-out, + scale 0.16s cubic-bezier(0.2, 0.9, 0.25, 1.4); +} + +.gx-contextmenu.show { + opacity: 1; + scale: 1; + pointer-events: auto; +} + +.gx-contextmenu-item { + padding: 10px 14px; + color: var(--color-text-primary); + cursor: pointer; + font-size: 0.95rem; + + display: flex; + justify-content: space-between; + align-items: center; + + transition: background var(--transition-fast), + color var(--transition-fast); +} + +/* Hover */ +.gx-contextmenu-item:hover { + background: rgba(255, 79, 163, 0.12); + color: var(--color-brand); +} + +/* Divider */ +.gx-contextmenu-divider { + height: 1px; + background: rgba(255, 79, 163, 0.15); + margin: 6px 0; +} + +/* Keyboard focused */ +.gx-contextmenu-item.focused { + background: rgba(255, 79, 163, 0.18); + color: var(--color-brand-hover); +} + +/* Optional submenu arrow (future support) */ +.gx-contextmenu-item .submenu-arrow { + opacity: 0.6; + font-size: 0.75rem; +} diff --git a/internal/web/static/css/gx/GX_ContextMenu.html b/internal/web/static/css/gx/GX_ContextMenu.html new file mode 100644 index 0000000..e404adf --- /dev/null +++ b/internal/web/static/css/gx/GX_ContextMenu.html @@ -0,0 +1 @@ + diff --git a/internal/web/static/css/gx/GX_ContextMenu.js b/internal/web/static/css/gx/GX_ContextMenu.js new file mode 100644 index 0000000..3b27c64 --- /dev/null +++ b/internal/web/static/css/gx/GX_ContextMenu.js @@ -0,0 +1,115 @@ + diff --git a/internal/web/static/css/gx/GX_Dialog.css b/internal/web/static/css/gx/GX_Dialog.css new file mode 100644 index 0000000..d89b083 --- /dev/null +++ b/internal/web/static/css/gx/GX_Dialog.css @@ -0,0 +1,126 @@ +/* + * GX DIALOG (MODAL) + * Premium dark dialog system with Flamingo Pink accents. + * Subtle shadows, soft glow, cinematic overlay fade. + */ + +.gx-dialog-overlay { + position: fixed; + inset: 0; + background: rgba(0, 0, 0, 0.65); + backdrop-filter: blur(6px); + z-index: 9000; + + opacity: 0; + pointer-events: none; + transition: opacity 0.25s ease; +} + +.gx-dialog-overlay.show { + opacity: 1; + pointer-events: auto; +} + +.gx-dialog { + position: fixed; + top: 50%; + left: 50%; + translate: -50% -46%; /* slightly above center for cinematic drop */ + width: min(480px, 92vw); + + background: var(--color-bg-card); + border-radius: var(--radius-soft); + border: 1px solid rgba(255, 79, 163, 0.20); + + box-shadow: + 0 18px 48px rgba(0, 0, 0, 0.65), + 0 0 34px rgba(255, 79, 163, 0.14); + + opacity: 0; + scale: 0.92; + pointer-events: none; + + padding: 1.8rem 2rem 2.2rem; + + z-index: 9999; + + transition: + opacity 0.25s ease, + scale 0.22s ease, + translate 0.28s cubic-bezier(0.16, 1, 0.3, 1); +} + +.gx-dialog.show { + opacity: 1; + scale: 1; + pointer-events: auto; + translate: -50% -50%; +} + +/* ----- Title + Text ----- */ +.gx-dialog-title { + font-size: 1.65rem; + font-weight: 700; + margin-bottom: 0.5rem; + background: linear-gradient(135deg, var(--color-brand), var(--color-header)); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +.gx-dialog-body { + font-size: 1rem; + color: var(--color-text-secondary); + margin-bottom: 1.8rem; + line-height: 1.55; +} + +/* ----- Buttons area ----- */ +.gx-dialog-actions { + display: flex; + justify-content: flex-end; + gap: 1rem; +} + +/* Accent button */ +.gx-dialog-btn-primary { + padding: 0.6rem 1.2rem; + background: var(--color-brand); + color: #fff; + border-radius: var(--radius); + border: none; + cursor: pointer; + transition: background var(--transition); + box-shadow: var(--shadow-glow-pink-soft); +} + +.gx-dialog-btn-primary:hover { + background: var(--color-brand-hover); + box-shadow: var(--shadow-glow-pink); +} + +/* Secondary (ghost) */ +.gx-dialog-btn-secondary { + padding: 0.6rem 1.1rem; + background: rgba(255, 79, 163, 0.08); + border: 1px solid rgba(255, 79, 163, 0.20); + color: var(--color-text-primary); + border-radius: var(--radius); + cursor: pointer; + transition: background var(--transition), border var(--transition); +} + +.gx-dialog-btn-secondary:hover { + background: rgba(255, 79, 163, 0.15); + border-color: rgba(255, 79, 163, 0.35); +} + +/* Mobile enhancements */ +@media (max-width: 540px) { + .gx-dialog { + padding: 1.4rem 1.5rem 1.8rem; + } + + .gx-dialog-title { + font-size: 1.45rem; + } +} diff --git a/internal/web/static/css/gx/GX_Dialog.html b/internal/web/static/css/gx/GX_Dialog.html new file mode 100644 index 0000000..a5d445a --- /dev/null +++ b/internal/web/static/css/gx/GX_Dialog.html @@ -0,0 +1,14 @@ +
+ + diff --git a/internal/web/static/css/gx/GX_Dialog.js b/internal/web/static/css/gx/GX_Dialog.js new file mode 100644 index 0000000..ced7bf7 --- /dev/null +++ b/internal/web/static/css/gx/GX_Dialog.js @@ -0,0 +1,48 @@ + diff --git a/internal/web/static/css/gx/GX_FilterBar.css b/internal/web/static/css/gx/GX_FilterBar.css new file mode 100644 index 0000000..b7d4ec9 --- /dev/null +++ b/internal/web/static/css/gx/GX_FilterBar.css @@ -0,0 +1,114 @@ +/* + * GX FILTER BAR + * Sticky top filter system with dark luxury + Flamingo Pink accents + */ + +.gx-filterbar { + width: 100%; + background: var(--color-bg-card); + border-bottom: 1px solid var(--color-border-soft); + + box-shadow: + 0 4px 18px rgba(0, 0, 0, 0.55), + inset 0 0 22px rgba(255, 79, 163, 0.06); + + position: sticky; + top: 0; + z-index: 80; + + padding: 0.9rem 1.4rem; + display: flex; + align-items: center; + gap: 1rem; + + backdrop-filter: blur(14px); +} + +/* Inner layout */ +.gx-filterbar-inner { + width: 100%; + display: flex; + align-items: center; + gap: 1rem; +} + +/* Inputs */ +.gx-filterbar-inner .gx-input { + flex: 2; + min-width: 170px; +} + +/* Dropdowns / Selects */ +.gx-filter-select { + flex: 1; + min-width: 140px; + + background: var(--color-bg-elevated); + color: var(--color-text-primary); + + padding: 0.6rem 0.8rem; + border-radius: var(--radius); + border: 1px solid var(--color-border-soft); + + transition: border var(--transition-fast), box-shadow var(--transition-fast); + cursor: pointer; + + box-shadow: inset 0 0 14px rgba(255, 79, 163, 0.05); +} + +.gx-filter-select:hover { + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + +.gx-filter-select:focus { + outline: none; + border-color: var(--color-brand-hover); + box-shadow: var(--shadow-glow-pink); +} + +/* Buttons */ +.gx-filterbar-inner .gx-button { + flex-shrink: 0; +} + +/* Mobile collapse toggle */ +.gx-filter-toggle { + display: none; + background: transparent; + border: none; + color: var(--color-brand); + font-size: 1.2rem; + cursor: pointer; + padding: 0.25rem; +} + +/* Mobile layout */ +@media (max-width: 900px) { + .gx-filterbar { + padding: 0.8rem 1rem; + } + + .gx-filter-toggle { + display: block; + } + + .gx-filterbar-inner { + display: none; + flex-direction: column; + align-items: stretch; + gap: 0.75rem; + width: 100%; + margin-top: 0.9rem; + } + + .gx-filterbar.open .gx-filterbar-inner { + display: flex; + } + + .gx-filter-select, + .gx-filterbar-inner .gx-input { + width: 100%; + min-width: unset; + } +} diff --git a/internal/web/static/css/gx/GX_FilterBar.html b/internal/web/static/css/gx/GX_FilterBar.html new file mode 100644 index 0000000..9c572e2 --- /dev/null +++ b/internal/web/static/css/gx/GX_FilterBar.html @@ -0,0 +1,29 @@ +
+ + +
+ + + + + + + + + + +
+
diff --git a/internal/web/static/css/gx/GX_Input.css b/internal/web/static/css/gx/GX_Input.css new file mode 100644 index 0000000..59c9569 --- /dev/null +++ b/internal/web/static/css/gx/GX_Input.css @@ -0,0 +1,96 @@ +/* + * GX INPUT — Premium Minimal Neon + * Dark-only, Flamingo Pink subtle glow + */ + +.gx-input { + width: 100%; + display: flex; + flex-direction: column; + gap: 0.4rem; +} + +/* Label */ +.gx-input label { + font-size: 0.9rem; + font-weight: 500; + color: var(--color-text-secondary); +} + +/* Field wrapper */ +.gx-input-field { + position: relative; + width: 100%; +} + +/* Input element */ +.gx-input-field input, +.gx-input-field textarea { + width: 100%; + padding: 0.85rem 1rem; + border-radius: var(--radius); + background: var(--color-bg-card); + + border: 1px solid var(--color-border-soft); + + font-size: 1rem; + color: var(--color-text-primary); + + outline: none; + + transition: + border-color var(--transition-fast), + background var(--transition-fast), + box-shadow var(--transition-fast), + transform var(--transition-fast); +} + +/* Placeholder */ +.gx-input-field input::placeholder, +.gx-input-field textarea::placeholder { + color: var(--color-text-secondary); + opacity: 0.55; +} + +/* Hover */ +.gx-input-field input:hover, +.gx-input-field textarea:hover { + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + +/* Focus — premium neon glow */ +.gx-input-field input:focus, +.gx-input-field textarea:focus { + border-color: var(--color-brand); + box-shadow: + var(--shadow-glow-pink), + 0 0 0 2px rgba(255, 79, 163, 0.18); + background: rgba(255, 79, 163, 0.06); + transform: translateY(-1px); +} + +/* Disabled */ +.gx-input-field input:disabled, +.gx-input-field textarea:disabled { + opacity: 0.45; + cursor: not-allowed; +} + +/* Error state */ +.gx-input.error input, +.gx-input.error textarea { + border-color: var(--color-warning); + box-shadow: 0 0 10px rgba(255, 170, 136, 0.2); +} + +.gx-input.error label { + color: var(--color-warning); +} + +/* Small hint text */ +.gx-input-hint { + font-size: 0.8rem; + color: var(--color-text-secondary); + opacity: 0.7; +} diff --git a/internal/web/static/css/gx/GX_Input.html b/internal/web/static/css/gx/GX_Input.html new file mode 100644 index 0000000..7ef2bb2 --- /dev/null +++ b/internal/web/static/css/gx/GX_Input.html @@ -0,0 +1,43 @@ +
+ + +
+ +
+ +
+
+ +
+ + +
+ +
+ +
+
Your key is stored securely on-device.
+
+ +
+ + +
+ +
+ +
+
Invalid scene ID.
+
+ +
+ + +
+ +
+ +
+
+ +
diff --git a/internal/web/static/css/gx/GX_Loader.css b/internal/web/static/css/gx/GX_Loader.css new file mode 100644 index 0000000..d9e0667 --- /dev/null +++ b/internal/web/static/css/gx/GX_Loader.css @@ -0,0 +1,65 @@ +/* + * GX LOADER — Premium Minimal Neon + * Elegant Flamingo Pink spinner with subtle glow. + */ + +.gx-loader { + width: 42px; + height: 42px; + border-radius: 50%; + border: 4px solid rgba(255, 79, 163, 0.18); + border-top-color: var(--color-brand); + + animation: gx-spin 0.9s linear infinite; + + box-shadow: 0 0 14px rgba(255, 79, 163, 0.25); +} + +/* Smaller inline version */ +.gx-loader-sm { + width: 26px; + height: 26px; + border-width: 3px; +} + +/* Larger hero version */ +.gx-loader-lg { + width: 64px; + height: 64px; + border-width: 6px; +} + +/* Centered container */ +.gx-loader-center { + width: 100%; + display: flex; + justify-content: center; + padding: 2.5rem 0; +} + +@keyframes gx-spin { + from { transform: rotate(0deg); } + to { transform: rotate(360deg); } +} + + +/* ===== Optional Pulse Aura ===== */ + +.gx-loader-pulse { + position: relative; +} + +.gx-loader-pulse::after { + content: ""; + position: absolute; + inset: 0; + border-radius: 50%; + background: var(--color-brand-glow); + animation: gx-pulse 1.8s ease-in-out infinite; +} + +@keyframes gx-pulse { + 0% { transform: scale(1); opacity: 0.45; } + 50% { transform: scale(1.4); opacity: 0.15; } + 100% { transform: scale(1); opacity: 0.45; } +} diff --git a/internal/web/static/css/gx/GX_Loader.html b/internal/web/static/css/gx/GX_Loader.html new file mode 100644 index 0000000..ef54184 --- /dev/null +++ b/internal/web/static/css/gx/GX_Loader.html @@ -0,0 +1,19 @@ +
+ +
+
+
+ +
+
+
+ +
+
+
+ +
+
+
+ +
diff --git a/internal/web/static/css/gx/GX_Modal.css b/internal/web/static/css/gx/GX_Modal.css new file mode 100644 index 0000000..1956970 --- /dev/null +++ b/internal/web/static/css/gx/GX_Modal.css @@ -0,0 +1,103 @@ +/* + * GX MODAL + * Dark-only / Flamingo Pink neon glow / Premium cyberdeck style + */ + +/* BACKDROP */ +.gx-modal-backdrop { + position: fixed; + inset: 0; + background: rgba(0, 0, 0, 0.82); + backdrop-filter: blur(8px); + -webkit-backdrop-filter: blur(8px); + opacity: 0; + pointer-events: none; + transition: opacity 0.25s ease; + z-index: 900; +} + +.gx-modal-backdrop.active { + opacity: 1; + pointer-events: all; +} + +/* MODAL CONTAINER */ +.gx-modal { + position: fixed; + top: 50%; + left: 50%; + transform: translate(-50%, -42%) scale(0.92); + background: var(--color-bg-card); + border-radius: var(--radius-soft); + border: 1px solid var(--color-border-soft); + box-shadow: + var(--shadow-elevated), + 0 0 22px rgba(255, 79, 163, 0.15); + width: min(520px, 92%); + padding: 2rem; + opacity: 0; + transition: + opacity var(--transition), + transform var(--transition); + z-index: 901; +} + +.gx-modal.active { + opacity: 1; + transform: translate(-50%, -50%) scale(1); +} + +/* HEADER */ +.gx-modal-header { + margin-bottom: 1.5rem; +} + +.gx-modal-title { + font-size: 1.4rem; + font-weight: 700; + color: var(--color-brand); + text-shadow: 0 0 8px var(--color-brand-glow); +} + +/* BODY */ +.gx-modal-body { + color: var(--color-text-secondary); + font-size: 1.05rem; + line-height: 1.55; + margin-bottom: 2rem; +} + +/* FOOTER */ +.gx-modal-footer { + display: flex; + justify-content: flex-end; + gap: 1rem; +} + +/* CLOSE BUTTON (top right, optional) */ +.gx-modal-close { + position: absolute; + top: 12px; + right: 14px; + font-size: 1.4rem; + color: var(--color-text-secondary); + cursor: pointer; + transition: color var(--transition-fast); +} + +.gx-modal-close:hover { + color: var(--color-brand); +} + +/* MOBILE RESPONSIVE */ +@media (max-width: 480px) { + .gx-modal { + padding: 1.4rem; + } + .gx-modal-title { + font-size: 1.25rem; + } + .gx-modal-body { + font-size: 1rem; + } +} diff --git a/internal/web/static/css/gx/GX_Modal.html b/internal/web/static/css/gx/GX_Modal.html new file mode 100644 index 0000000..562a889 --- /dev/null +++ b/internal/web/static/css/gx/GX_Modal.html @@ -0,0 +1,21 @@ + diff --git a/internal/web/static/css/gx/GX_Modal.js b/internal/web/static/css/gx/GX_Modal.js new file mode 100644 index 0000000..bb66943 --- /dev/null +++ b/internal/web/static/css/gx/GX_Modal.js @@ -0,0 +1,32 @@ + diff --git a/internal/web/static/css/gx/GX_Pagination.css b/internal/web/static/css/gx/GX_Pagination.css new file mode 100644 index 0000000..74ab1d2 --- /dev/null +++ b/internal/web/static/css/gx/GX_Pagination.css @@ -0,0 +1,77 @@ +/* + * GX PAGINATION + * Dark mode + Flamingo Pink medium glow + */ + +.gx-pagination { + display: flex; + justify-content: center; + align-items: center; + gap: 0.4rem; + + margin: 2.5rem 0; + padding: 0.5rem; + + flex-wrap: wrap; +} + +/* Page Button (core style) */ +.gx-page-btn { + min-width: 38px; + height: 38px; + + display: flex; + align-items: center; + justify-content: center; + + padding: 0 0.75rem; + font-size: 0.95rem; + + background: var(--color-bg-card); + color: var(--color-text-secondary); + border: 1px solid var(--color-border-soft); + + border-radius: var(--radius); + cursor: pointer; + + transition: all var(--transition-fast); +} + +.gx-page-btn:hover { + color: var(--color-brand); + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + +/* Active page */ +.gx-page-btn.active { + color: var(--color-brand-hover); + background: rgba(255, 79, 163, 0.15); + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink); +} + +/* Disabled */ +.gx-page-btn.disabled { + opacity: 0.35; + pointer-events: none; +} + +/* Ellipsis */ +.gx-ellipsis { + padding: 0 0.5rem; + opacity: 0.4; +} + +/* Mobile responsiveness */ +@media (max-width: 520px) { + .gx-page-btn { + min-width: 34px; + height: 34px; + font-size: 0.85rem; + } + + .gx-page-btn.text-label { + display: none; /* hide 'Next' / 'Previous' text labels */ + } +} diff --git a/internal/web/static/css/gx/GX_Pagination.html b/internal/web/static/css/gx/GX_Pagination.html new file mode 100644 index 0000000..e89f702 --- /dev/null +++ b/internal/web/static/css/gx/GX_Pagination.html @@ -0,0 +1,44 @@ +
+ + + + ‹ + Prev + + + + {{if gt .Current 3}} + 1 + {{end}} + + + {{if gt .Current 4}} + + {{end}} + + + {{range .Pages}} + + {{.}} + + {{end}} + + + {{if lt .Current (sub .Total 3)}} + + {{end}} + + + {{if lt .Current (sub .Total 2)}} + {{.Total}} + {{end}} + + + + Next + › + + +
diff --git a/internal/web/static/css/gx/GX_Pagination.js b/internal/web/static/css/gx/GX_Pagination.js new file mode 100644 index 0000000..235e60e --- /dev/null +++ b/internal/web/static/css/gx/GX_Pagination.js @@ -0,0 +1,14 @@ +document.querySelectorAll(".gx-pagination").forEach(pag => { + pag.addEventListener("keydown", e => { + const buttons = [...pag.querySelectorAll(".gx-page-btn:not(.disabled)")]; + const active = pag.querySelector(".gx-page-btn.active"); + const index = buttons.indexOf(active); + + if (e.key === "ArrowRight" && index < buttons.length - 1) { + buttons[index + 1].focus(); + } + if (e.key === "ArrowLeft" && index > 0) { + buttons[index - 1].focus(); + } + }); +}); diff --git a/internal/web/static/css/gx/GX_Radio.css b/internal/web/static/css/gx/GX_Radio.css new file mode 100644 index 0000000..3473876 --- /dev/null +++ b/internal/web/static/css/gx/GX_Radio.css @@ -0,0 +1,82 @@ +/* + * GX RADIO — Premium Minimal Neon + * Dark-only, Flamingo Pink accent with subtle glow. + */ + +.gx-radio { + position: relative; + display: inline-flex; + align-items: center; + gap: 0.55rem; + cursor: pointer; + user-select: none; + font-size: 1rem; + color: var(--color-text-primary); +} + +/* Hide actual input */ +.gx-radio input { + position: absolute; + opacity: 0; + pointer-events: none; +} + +/* Outer circle */ +.gx-radio-mark { + width: 20px; + height: 20px; + border-radius: 50%; + border: 2px solid var(--color-border-soft); + display: flex; + justify-content: center; + align-items: center; + transition: + border-color var(--transition-fast), + box-shadow var(--transition), + background var(--transition-fast); + box-shadow: 0 0 6px rgba(255, 79, 163, 0.15); +} + +/* Inner dot */ +.gx-radio-mark::after { + content: ""; + width: 10px; + height: 10px; + background: var(--color-brand); + border-radius: 50%; + transform: scale(0); + transition: transform var(--transition-fast); + box-shadow: 0 0 14px rgba(255, 79, 163, 0.25); +} + +/* Checked */ +.gx-radio input:checked + .gx-radio-mark { + border-color: var(--color-brand); + box-shadow: 0 0 14px rgba(255, 79, 163, 0.38); +} + +.gx-radio input:checked + .gx-radio-mark::after { + transform: scale(1); +} + +/* Hover */ +.gx-radio:hover .gx-radio-mark { + border-color: var(--color-brand-hover); + box-shadow: 0 0 14px rgba(255, 79, 163, 0.28); +} + +/* Keyboard focus */ +.gx-radio input:focus-visible + .gx-radio-mark { + outline: 2px solid var(--color-brand); + outline-offset: 4px; +} + +/* Disabled */ +.gx-radio input:disabled + .gx-radio-mark { + opacity: 0.35; + cursor: not-allowed; +} + +.gx-radio input:disabled ~ span { + opacity: 0.35; +} diff --git a/internal/web/static/css/gx/GX_Radio.html b/internal/web/static/css/gx/GX_Radio.html new file mode 100644 index 0000000..e91285c --- /dev/null +++ b/internal/web/static/css/gx/GX_Radio.html @@ -0,0 +1,27 @@ +
+ + + + + + + + + +
diff --git a/internal/web/static/css/gx/GX_SegmentedControl.css b/internal/web/static/css/gx/GX_SegmentedControl.css new file mode 100644 index 0000000..7baa4d0 --- /dev/null +++ b/internal/web/static/css/gx/GX_SegmentedControl.css @@ -0,0 +1,54 @@ +/* + * GX SEGMENTED CONTROL + * Dark mode only, Flamingo Pink neon accents, subtle glow + */ + +.gx-segmented { + display: inline-flex; + background: var(--color-bg-card); + padding: 4px; + border-radius: var(--radius-soft); + border: 1px solid var(--color-border-soft); + box-shadow: inset 0 0 18px rgba(255, 79, 163, 0.06); + position: relative; + gap: 4px; +} + +.gx-segmented button { + flex: 1; + background: transparent; + border: none; + padding: 0.65rem 1.2rem; + color: var(--color-text-secondary); + font-size: 0.95rem; + font-weight: 500; + border-radius: var(--radius); + cursor: pointer; + transition: + color var(--transition-fast), + background var(--transition-fast), + box-shadow var(--transition-fast); +} + +/* ACTIVE STATE */ +.gx-segmented button.active { + background: rgba(255, 79, 163, 0.15); + color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + +/* HOVER */ +.gx-segmented button:hover { + color: var(--color-brand-hover); +} + +/* DISABLED */ +.gx-segmented button.disabled { + opacity: 0.4; + cursor: not-allowed; +} + +/* FULL WIDTH OPTION */ +.gx-segmented.full { + width: 100%; +} diff --git a/internal/web/static/css/gx/GX_SegmentedControl.html b/internal/web/static/css/gx/GX_SegmentedControl.html new file mode 100644 index 0000000..16773ca --- /dev/null +++ b/internal/web/static/css/gx/GX_SegmentedControl.html @@ -0,0 +1,30 @@ +
+

GX Segmented Control Demo

+ +
+ + + + +
+
+ + diff --git a/internal/web/static/css/gx/GX_Select.css b/internal/web/static/css/gx/GX_Select.css new file mode 100644 index 0000000..e1e3b9d --- /dev/null +++ b/internal/web/static/css/gx/GX_Select.css @@ -0,0 +1,120 @@ +/* + * GX SELECT — Custom Dropdown + * Dark mode only, Flamingo Pink neon accents, medium glow + */ + +.gx-select { + position: relative; + display: inline-block; + width: 100%; + max-width: 420px; + font-size: 1rem; + color: var(--color-text-primary); +} + +/* The visible select box */ +.gx-select-trigger { + background: var(--color-bg-card); + padding: 0.85rem 1rem; + border-radius: var(--radius); + border: 1px solid var(--color-border-soft); + display: flex; + justify-content: space-between; + align-items: center; + cursor: pointer; + transition: border var(--transition), box-shadow var(--transition); + box-shadow: 0 0 12px rgba(255, 79, 163, 0.08); +} + +/* Label text inside select */ +.gx-select-trigger span { + opacity: 0.95; +} + +/* Down arrow */ +.gx-select-trigger svg { + width: 18px; + height: 18px; + fill: var(--color-text-secondary); + transition: transform var(--transition-fast), fill var(--transition-fast); +} + +/* Hover */ +.gx-select-trigger:hover { + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + +.gx-select.open .gx-select-trigger { + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink); +} + +.gx-select.open .gx-select-trigger svg { + transform: rotate(180deg); + fill: var(--color-brand); +} + +/* Dropdown Menu */ +.gx-select-menu { + position: absolute; + left: 0; + right: 0; + top: calc(100% + 6px); + background: var(--color-bg-elevated); + border-radius: var(--radius); + border: 1px solid var(--color-border-soft); + box-shadow: 0 12px 34px rgba(0, 0, 0, 0.65), var(--shadow-glow-pink-soft); + padding: 0.4rem 0; + opacity: 0; + visibility: hidden; + transform: translateY(-4px); + transition: + opacity var(--transition-fast), + transform var(--transition-fast), + visibility 0s linear 0.2s; + z-index: 100; +} + +.gx-select.open .gx-select-menu { + opacity: 1; + visibility: visible; + transform: translateY(0px); + transition-delay: 0s; +} + +/* Menu items */ +.gx-option { + padding: 0.75rem 1rem; + cursor: pointer; + display: flex; + align-items: center; + transition: background var(--transition-fast), color var(--transition-fast); + color: var(--color-text-secondary); + font-size: 0.95rem; +} + +/* Hover state */ +.gx-option:hover { + background: rgba(255, 79, 163, 0.08); + color: var(--color-text-primary); +} + +/* Selected option (highlighted) */ +.gx-option.selected { + color: var(--color-brand); + font-weight: 600; +} + +/* Divider (optional) */ +.gx-select-divider { + height: 1px; + margin: 0.35rem 0; + background: rgba(255, 255, 255, 0.08); +} + +/* Disabled */ +.gx-option.disabled { + opacity: 0.35; + cursor: not-allowed; +} diff --git a/internal/web/static/css/gx/GX_Select.html b/internal/web/static/css/gx/GX_Select.html new file mode 100644 index 0000000..1305943 --- /dev/null +++ b/internal/web/static/css/gx/GX_Select.html @@ -0,0 +1,67 @@ +
+ +

GX Select Demo

+ +
+
+ Select Performer Tag + +
+ +
+
Softcore
+
Hardcore
+
Solo
+
Lesbian
+
POV
+ +
+ +
VR
+
Premium Only
+
+
+ +
+ + + diff --git a/internal/web/static/css/gx/GX_SelectMenu.css b/internal/web/static/css/gx/GX_SelectMenu.css new file mode 100644 index 0000000..b86bc42 --- /dev/null +++ b/internal/web/static/css/gx/GX_SelectMenu.css @@ -0,0 +1,115 @@ +/* + * GX SELECT MENU + * Flamingo Pink dark-mode animated dropdown + */ + +.gx-select { + position: relative; + width: 100%; + user-select: none; +} + +.gx-select-trigger { + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius); + padding: 0.75rem 1rem; + color: var(--color-text-primary); + font-size: 1rem; + + display: flex; + justify-content: space-between; + align-items: center; + cursor: pointer; + + transition: border var(--transition-fast), background var(--transition-fast); +} + +.gx-select-trigger:hover { + border-color: var(--color-brand); +} + +.gx-select-trigger .arrow { + margin-left: 0.5rem; + transition: transform var(--transition-fast); +} + +.gx-select.open .gx-select-trigger .arrow { + transform: rotate(180deg); +} + +/* DROPDOWN PANEL */ +.gx-select-menu { + position: absolute; + top: calc(100% + 6px); + left: 0; + width: 100%; + max-height: 260px; + overflow-y: auto; + + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius); + padding: 6px 0; + + opacity: 0; + scale: 0.96; + pointer-events: none; + + box-shadow: + 0 12px 32px rgba(0, 0, 0, 0.6), + 0 0 24px rgba(255, 79, 163, 0.20); + + transition: + opacity 0.17s ease-out, + scale 0.14s cubic-bezier(0.15, 0.9, 0.25, 1.3); + + z-index: 9999; +} + +/* When open */ +.gx-select.open .gx-select-menu { + opacity: 1; + pointer-events: auto; + scale: 1; +} + +/* ITEM */ +.gx-select-item { + padding: 0.75rem 1rem; + color: var(--color-text-primary); + font-size: 0.95rem; + cursor: pointer; + + transition: background var(--transition-fast), + color var(--transition-fast); +} + +.gx-select-item:hover { + background: rgba(255, 79, 163, 0.12); + color: var(--color-brand); +} + +/* Selected state */ +.gx-select-item.selected { + background: rgba(255, 79, 163, 0.18); + color: var(--color-brand-hover); +} + +/* Divider option */ +.gx-select-divider { + height: 1px; + background: rgba(255, 79, 163, 0.15); + margin: 6px 0; +} + +/* Scrollbar */ +.gx-select-menu::-webkit-scrollbar { + width: 10px; +} + +.gx-select-menu::-webkit-scrollbar-thumb { + background: var(--color-brand); + border-radius: 6px; + box-shadow: var(--shadow-glow-pink-soft); +} diff --git a/internal/web/static/css/gx/GX_SelectMenu.html b/internal/web/static/css/gx/GX_SelectMenu.html new file mode 100644 index 0000000..299d3e9 --- /dev/null +++ b/internal/web/static/css/gx/GX_SelectMenu.html @@ -0,0 +1,16 @@ +
+
+ Choose an option + +
+ + +
diff --git a/internal/web/static/css/gx/GX_SelectMenu.js b/internal/web/static/css/gx/GX_SelectMenu.js new file mode 100644 index 0000000..c7c51db --- /dev/null +++ b/internal/web/static/css/gx/GX_SelectMenu.js @@ -0,0 +1,78 @@ +document.querySelectorAll(".gx-select").forEach(select => { + const trigger = select.querySelector(".gx-select-trigger"); + const menu = select.querySelector(".gx-select-menu"); + const valueElement = select.querySelector(".gx-select-value"); + const items = menu.querySelectorAll(".gx-select-item"); + + let open = false; + let index = -1; + + /* OPEN/CLOSE */ + trigger.addEventListener("click", () => toggleMenu()); + + function toggleMenu() { + open = !open; + select.classList.toggle("open", open); + + if (open) { + index = -1; + positionMenu(); + } + } + + /* SMART POSITIONING */ + function positionMenu() { + const rect = menu.getBoundingClientRect(); + if (rect.bottom > window.innerHeight) { + menu.style.top = "auto"; + menu.style.bottom = "calc(100% + 6px)"; + } + } + + /* ITEM CLICK */ + items.forEach((item, i) => { + item.addEventListener("click", () => { + items.forEach(i => i.classList.remove("selected")); + item.classList.add("selected"); + valueElement.textContent = item.textContent; + + select.dataset.value = item.dataset.value; + toggleMenu(); + }); + }); + + /* CLICK OUTSIDE */ + document.addEventListener("click", e => { + if (!select.contains(e.target)) { + select.classList.remove("open"); + open = false; + } + }); + + /* KEYBOARD NAVIGATION */ + trigger.addEventListener("keydown", e => { + if (!open && (e.key === "ArrowDown" || e.key === "Enter")) { + toggleMenu(); + return; + } + + if (!open) return; + + if (e.key === "ArrowDown") { + index = (index + 1) % items.length; + highlight(); + } else if (e.key === "ArrowUp") { + index = (index - 1 + items.length) % items.length; + highlight(); + } else if (e.key === "Enter") { + if (index >= 0) items[index].click(); + } else if (e.key === "Escape") { + toggleMenu(); + } + }); + + function highlight() { + items.forEach(i => i.classList.remove("selected")); + if (index >= 0) items[index].classList.add("selected"); + } +}); diff --git a/internal/web/static/css/gx/GX_Table.css b/internal/web/static/css/gx/GX_Table.css new file mode 100644 index 0000000..cbf010a --- /dev/null +++ b/internal/web/static/css/gx/GX_Table.css @@ -0,0 +1,123 @@ +/* + * GX TABLE — Dark Luxury Data Grid + * Flamingo Pink accents, subtle glow, smooth hover interactions + */ + +/* WRAPPER (scroll-safe on mobile) */ +.gx-table-wrapper { + width: 100%; + overflow-x: auto; + padding-bottom: 0.5rem; +} + +/* TABLE */ +.gx-table { + width: 100%; + border-collapse: collapse; + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius-soft); + box-shadow: var(--shadow-elevated); + overflow: hidden; +} + +/* HEADER */ +.gx-table thead th { + background: var(--color-bg-elevated); + color: var(--color-text-primary); + text-align: left; + + font-weight: 600; + padding: 0.85rem 1.1rem; + font-size: 0.9rem; + + border-bottom: 1px solid var(--color-border-soft); + position: sticky; + top: 0; + + backdrop-filter: blur(10px); + z-index: 2; +} + +/* HEADER SORTABLE STATE */ +.gx-table th.sortable { + cursor: pointer; + transition: color var(--transition-fast), text-shadow var(--transition-fast); +} + +.gx-table th.sortable:hover { + color: var(--color-brand); + text-shadow: 0 0 8px var(--color-brand-glow); +} + +/* BODY ROWS */ +.gx-table tbody tr { + transition: background var(--transition-fast), box-shadow var(--transition-fast); +} + +.gx-table tbody tr:hover { + background: rgba(255, 79, 163, 0.05); + box-shadow: inset 0 0 18px rgba(255, 79, 163, 0.08); +} + +/* CELLS */ +.gx-table td { + padding: 0.75rem 1.1rem; + border-bottom: 1px solid rgba(255, 79, 163, 0.08); + color: var(--color-text-secondary); + font-size: 0.9rem; +} + +/* FINAL ROW BORDER REMOVAL */ +.gx-table tbody tr:last-child td { + border-bottom: none; +} + +/* CLICKABLE ROW */ +.gx-table-row-link { + cursor: pointer; +} + +.gx-table-row-link:hover td { + color: var(--color-text-primary); +} + +/* MOBILE (stack columns) — optional but recommended */ +@media (max-width: 750px) { + .gx-table thead { + display: none; + } + + .gx-table, + .gx-table tbody, + .gx-table tr, + .gx-table td { + display: block; + width: 100%; + } + + .gx-table tr { + margin-bottom: 1rem; + background: var(--color-bg-card); + border-radius: var(--radius); + box-shadow: var(--shadow-elevated); + padding: 0.8rem; + } + + .gx-table td { + border-bottom: none; + padding: 0.5rem 0; + color: var(--color-text-primary); + font-size: 1rem; + position: relative; + } + + .gx-table td::before { + content: attr(data-label); + font-size: 0.75rem; + color: var(--color-text-secondary); + text-transform: uppercase; + letter-spacing: 0.05em; + display: block; + } +} diff --git a/internal/web/static/css/gx/GX_Table.html b/internal/web/static/css/gx/GX_Table.html new file mode 100644 index 0000000..7741ad9 --- /dev/null +++ b/internal/web/static/css/gx/GX_Table.html @@ -0,0 +1,26 @@ +
+ + + + + + + + + + + + {{range .Performers}} + + + + + + + + {{end}} + +
NameScenesTags
+ +
diff --git a/internal/web/static/css/gx/GX_Tabs.css b/internal/web/static/css/gx/GX_Tabs.css new file mode 100644 index 0000000..8e5f1f2 --- /dev/null +++ b/internal/web/static/css/gx/GX_Tabs.css @@ -0,0 +1,72 @@ +/* + * GX TABS + * Dark mode only, Flamingo Pink neon accents, subtle glow + */ + +.gx-tabs { + display: flex; + flex-direction: column; + width: 100%; +} + +/* TAB LIST (header bar) */ +.gx-tab-list { + display: flex; + gap: 0.5rem; + padding: 0.75rem; + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius-soft); + box-shadow: inset 0 0 20px rgba(255, 79, 163, 0.06); +} + +/* TAB BUTTON */ +.gx-tab { + background: transparent; + border: none; + padding: 0.8rem 1.4rem; + border-radius: var(--radius); + font-weight: 500; + cursor: pointer; + color: var(--color-text-secondary); + transition: + background var(--transition-fast), + color var(--transition-fast), + box-shadow var(--transition-fast); +} + +/* HOVER */ +.gx-tab:hover { + color: var(--color-brand-hover); +} + +/* ACTIVE TAB */ +.gx-tab.active { + background: rgba(255, 79, 163, 0.18); + color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); +} + +/* DISABLED */ +.gx-tab.disabled { + opacity: 0.4; + cursor: not-allowed; +} + +/* TAB CONTENT AREA */ +.gx-tab-panels { + margin-top: 1rem; + background: var(--color-bg-card); + padding: 2rem; + border-radius: var(--radius); + border: 1px solid var(--color-border-soft); + box-shadow: var(--shadow-elevated); +} + +.gx-tab-panel { + display: none; +} + +.gx-tab-panel.active { + display: block; +} diff --git a/internal/web/static/css/gx/GX_Tabs.html b/internal/web/static/css/gx/GX_Tabs.html new file mode 100644 index 0000000..ba78baf --- /dev/null +++ b/internal/web/static/css/gx/GX_Tabs.html @@ -0,0 +1,58 @@ +
+

GX Tabs Demo

+ +
+ + +
+ + + + +
+ + +
+
+

Overview

+

General dashboard info here.

+
+ +
+

Scenes

+

Scene list, filters, metadata.

+
+ +
+

Performers

+

Performer profile data.

+
+ +
+

Analytics

+

Charts, stats, insights.

+
+
+
+
+ + + diff --git a/internal/web/static/css/gx/GX_Tag.css b/internal/web/static/css/gx/GX_Tag.css new file mode 100644 index 0000000..a48e767 --- /dev/null +++ b/internal/web/static/css/gx/GX_Tag.css @@ -0,0 +1,84 @@ +/* + * GX TAG COMPONENT + * Dark mode + Flamingo Pink accents + * Supports: default, clickable, removable, outline + */ + +.gx-tag { + display: inline-flex; + align-items: center; + gap: 0.35rem; + + padding: 6px 12px; + font-size: 0.82rem; + font-weight: 500; + + background: var(--color-bg-card); + color: var(--color-text-primary); + + border: 1px solid var(--color-border-soft); + border-radius: var(--radius); + + box-shadow: 0 0 12px rgba(255, 79, 163, 0.08); + + cursor: default; + user-select: none; + + transition: all var(--transition-fast); +} + +.gx-tag + .gx-tag { + margin-left: 0.4rem; +} + +/* Hover effect (clickable tags) */ +.gx-tag.clickable { + cursor: pointer; +} + +.gx-tag.clickable:hover { + border-color: var(--color-brand); + box-shadow: var(--shadow-glow-pink-soft); + color: var(--color-brand-hover); +} + +/* Active tag (for filtering) */ +.gx-tag.active { + background: rgba(255, 79, 163, 0.12); + border-color: var(--color-brand); + color: var(--color-brand-hover); + box-shadow: var(--shadow-glow-pink); +} + +/* Outline variant */ +.gx-tag-outline { + background: transparent; + border-color: var(--color-brand); + color: var(--color-brand); +} + +.gx-tag-outline:hover { + box-shadow: var(--shadow-glow-pink); +} + +/* Removable (X button) */ +.gx-tag .remove-btn { + font-size: 0.95rem; + opacity: 0.7; + cursor: pointer; + + transition: opacity var(--transition-fast); +} + +.gx-tag .remove-btn:hover { + opacity: 1; + color: var(--color-brand-hover); +} + +/* Tag grids (nice wrapping for long lists) */ +.gx-tag-list { + display: flex; + flex-wrap: wrap; + gap: 0.4rem; + margin: 1rem 0; +} diff --git a/internal/web/static/css/gx/GX_Toast.css b/internal/web/static/css/gx/GX_Toast.css new file mode 100644 index 0000000..5a0c56d --- /dev/null +++ b/internal/web/static/css/gx/GX_Toast.css @@ -0,0 +1,95 @@ +/* + * GX TOAST + * Dark-only / Flamingo Pink glow / Stackable notifications + */ + +.gx-toast-container { + position: fixed; + right: 1.5rem; + bottom: 1.5rem; + display: flex; + flex-direction: column; + gap: 0.75rem; + z-index: 999; +} + +/* Base toast */ +.gx-toast { + min-width: 260px; + max-width: 360px; + + background: var(--color-bg-card); + color: var(--color-text-primary); + padding: 1rem 1.2rem; + border-radius: var(--radius); + + border-left: 4px solid var(--color-border-soft); + box-shadow: var(--shadow-elevated); + + display: flex; + justify-content: space-between; + align-items: start; + gap: 1rem; + + opacity: 0; + transform: translateY(12px); + animation: toast-in 0.35s var(--transition) forwards; +} + +/* Toast types */ +.gx-toast.success { border-color: #4FEA9C; } +.gx-toast.info { border-color: var(--color-info); } +.gx-toast.warn { border-color: var(--color-warning); } +.gx-toast.error { border-color: #FF5C5C; } + +.gx-toast strong { + font-weight: 600; + color: var(--color-brand); +} + +.gx-toast-close { + cursor: pointer; + font-size: 1.2rem; + color: var(--color-text-secondary); + transition: color var(--transition-fast); +} + +.gx-toast-close:hover { + color: var(--color-brand); +} + +/* OUT animation (called when dismissed) */ +.gx-toast.hide { + animation: toast-out 0.3s var(--transition) forwards; +} + +/* Animations */ +@keyframes toast-in { + from { + opacity: 0; + transform: translateY(12px) scale(0.97); + } + to { + opacity: 1; + transform: translateY(0) scale(1); + } +} + +@keyframes toast-out { + to { + opacity: 0; + transform: translateY(12px) scale(0.97); + } +} + +/* Mobile */ +@media (max-width: 480px) { + .gx-toast-container { + right: 0.75rem; + bottom: 0.75rem; + left: 0.75rem; + } + .gx-toast { + max-width: 100%; + } +} diff --git a/internal/web/static/css/gx/GX_Toast.html b/internal/web/static/css/gx/GX_Toast.html new file mode 100644 index 0000000..ce4931f --- /dev/null +++ b/internal/web/static/css/gx/GX_Toast.html @@ -0,0 +1 @@ +
diff --git a/internal/web/static/css/gx/GX_Toast.js b/internal/web/static/css/gx/GX_Toast.js new file mode 100644 index 0000000..a030d9b --- /dev/null +++ b/internal/web/static/css/gx/GX_Toast.js @@ -0,0 +1,28 @@ + diff --git a/internal/web/static/css/gx/GX_Toggle.css b/internal/web/static/css/gx/GX_Toggle.css new file mode 100644 index 0000000..ef109be --- /dev/null +++ b/internal/web/static/css/gx/GX_Toggle.css @@ -0,0 +1,88 @@ +/* + * GX TOGGLE — Minimal Neon Switch + * Dark-only, Flamingo Pink accents, medium glow + */ + +.gx-toggle { + position: relative; + display: inline-flex; + align-items: center; + gap: 0.65rem; + cursor: pointer; + user-select: none; + font-size: 1rem; + color: var(--color-text-primary); +} + +/* Hide native checkbox */ +.gx-toggle input { + position: absolute; + opacity: 0; + pointer-events: none; +} + +/* Track */ +.gx-toggle-track { + width: 46px; + height: 24px; + background: var(--color-bg-card); + border-radius: 30px; + border: 1px solid var(--color-border-soft); + position: relative; + transition: + background var(--transition-fast), + border-color var(--transition-fast), + box-shadow var(--transition-fast); + box-shadow: 0 0 10px rgba(255, 79, 163, 0.15); +} + +/* Thumb */ +.gx-toggle-thumb { + width: 20px; + height: 20px; + background: var(--color-text-primary); + border-radius: 50%; + position: absolute; + top: 1.5px; + left: 2px; + transition: + transform var(--transition-fast), + background var(--transition-fast), + box-shadow var(--transition-fast); + box-shadow: 0 0 6px rgba(255, 255, 255, 0.2); +} + +/* Hover (slight brighten) */ +.gx-toggle:hover .gx-toggle-track { + border-color: var(--color-brand-hover); + box-shadow: 0 0 14px rgba(255, 79, 163, 0.25); +} + +/* ON State */ +.gx-toggle input:checked + .gx-toggle-track { + background: var(--color-brand); + border-color: var(--color-brand-hover); + box-shadow: 0 0 16px rgba(255, 79, 163, 0.35); +} + +.gx-toggle input:checked + .gx-toggle-track .gx-toggle-thumb { + transform: translateX(22px); + background: #fff; + box-shadow: 0 0 14px rgba(255, 79, 163, 0.4); +} + +/* Keyboard focus */ +.gx-toggle input:focus-visible + .gx-toggle-track { + outline: 2px solid var(--color-brand); + outline-offset: 4px; +} + +/* Disabled */ +.gx-toggle input:disabled + .gx-toggle-track { + opacity: 0.4; + cursor: not-allowed; +} + +.gx-toggle input:disabled ~ span { + opacity: 0.4; +} diff --git a/internal/web/static/css/gx/GX_Toggle.html b/internal/web/static/css/gx/GX_Toggle.html new file mode 100644 index 0000000..5e03ea2 --- /dev/null +++ b/internal/web/static/css/gx/GX_Toggle.html @@ -0,0 +1,27 @@ +
+ + + + + + + +
diff --git a/internal/web/static/css/gx/GX_Tooltip.css b/internal/web/static/css/gx/GX_Tooltip.css new file mode 100644 index 0000000..e7576f0 --- /dev/null +++ b/internal/web/static/css/gx/GX_Tooltip.css @@ -0,0 +1,58 @@ +/* + * GX TOOLTIP + * Minimal dark-only tooltip with subtle Flamingo Pink glow. + */ + +.gx-tooltip { + position: fixed; + z-index: 99999; + + background: var(--color-bg-card); + padding: 0.55rem 0.9rem; + border-radius: var(--radius); + border: 1px solid rgba(255, 79, 163, 0.20); + + color: var(--color-text-primary); + font-size: 0.85rem; + line-height: 1.4; + white-space: nowrap; + + box-shadow: 0 4px 16px rgba(0, 0, 0, 0.55), + 0 0 18px rgba(255, 79, 163, 0.12); /* very subtle */ + + opacity: 0; + transform: translateY(4px) scale(0.98); + pointer-events: none; + + transition: opacity 0.18s ease, + transform 0.18s ease; +} + +/* When visible */ +.gx-tooltip.show { + opacity: 1; + transform: translateY(0) scale(1); +} + +/* Optional: subtle arrow */ +.gx-tooltip::after { + content: ""; + position: absolute; + bottom: -6px; + left: 18px; + border-width: 6px 6px 0 6px; + border-style: solid; + border-color: var(--color-bg-card) transparent transparent transparent; + filter: drop-shadow(0 -2px 3px rgba(255, 79, 163, 0.15)); + opacity: 0.85; +} + +/* Mobile: center tooltips */ +@media (max-width: 600px) { + .gx-tooltip { + font-size: 0.9rem; + max-width: 85vw; + white-space: normal; + text-align: center; + } +} diff --git a/internal/web/static/css/gx/GX_Tooltip.js b/internal/web/static/css/gx/GX_Tooltip.js new file mode 100644 index 0000000..37c4ed7 --- /dev/null +++ b/internal/web/static/css/gx/GX_Tooltip.js @@ -0,0 +1,40 @@ + diff --git a/internal/web/static/css/layout.css b/internal/web/static/css/layout.css new file mode 100644 index 0000000..b1f9e3b --- /dev/null +++ b/internal/web/static/css/layout.css @@ -0,0 +1,267 @@ +/* + * GOONDEX LAYOUT + * Structure, spacing, navbar, hero, stats, responsive tiers. + */ + +/* ================================ + * MAIN PAGE WRAPPING + * =================================== */ + +body { + display: flex; + justify-content: center; + align-items: stretch; + min-height: 100vh; +} + +/* Main content (center column) */ +.main-wrapper { + flex: 1; + max-width: 1800px; + overflow-y: auto; + padding-bottom: 4rem; +} + +/* Shared container */ +.container { + max-width: 1700px; + margin: 0 auto; + padding: 0 1.5rem; +} + + +/* ================================ + * SIDE PANELS (OPTION A — scroll WITH page) + * =================================== */ + +.side-panel { + width: 220px; + flex-shrink: 0; + background: #000; + border-left: 1px solid var(--color-border-soft); + border-right: 1px solid var(--color-border-soft); + display: flex; + flex-direction: column; + overflow: hidden; +} + +.side-panel img { + width: 100%; + height: auto; + display: block; + object-fit: cover; + opacity: 0.75; + transition: opacity 0.25s ease; +} + +.side-panel img:hover { + opacity: 1; +} + + +/* ================================ + * NAVBAR + * =================================== */ + +.navbar { + background: var(--color-bg-card); + border-bottom: 1px solid var(--color-border-soft); + padding: 0.75rem 0; + position: sticky; + top: 0; + z-index: 40; + backdrop-filter: blur(6px); + box-shadow: var(--shadow-glow-pink-soft); +} + +.nav-inner { + display: flex; + align-items: center; + justify-content: space-between; +} + +/* Logo image control */ +.logo-img { + height: 42px; + width: auto; + display: block; +} + +/* Navbar links */ +.nav-links { + list-style: none; + display: flex; + gap: 2rem; +} + +.nav-links a { + color: var(--color-text-secondary); + text-decoration: none; + font-weight: 500; + transition: color var(--transition-fast); +} + +.nav-links a:hover, +.nav-links a.active { + color: var(--color-brand); +} + + +/* ================================ + * HERO SECTION + * =================================== */ + +.hero-section { + background: linear-gradient( + 135deg, + rgba(255, 79, 163, 0.10), + rgba(216, 132, 226, 0.05) + ); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius-soft); + padding: 4rem 3rem; + margin-bottom: 3rem; + position: relative; + overflow: hidden; + box-shadow: var(--shadow-glow-pink-soft); +} + +/* Subtle radial neon glow (G-A) */ +.hero-section::after { + content: ""; + position: absolute; + inset: 0; + background: radial-gradient( + circle at 50% 20%, + rgba(255, 79, 163, 0.15), + rgba(255, 79, 163, 0.05) 40%, + transparent 75% + ); + pointer-events: none; +} + +.hero-title { + font-size: 3.2rem; + font-weight: 800; + background: linear-gradient( + 135deg, + var(--color-brand), + var(--color-header) + ); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +.hero-subtitle { + margin-top: 1rem; + font-size: 1.2rem; + color: var(--color-text-secondary); + max-width: 580px; + margin-inline: auto; +} + + +/* ================================ + * STATS GRID + * =================================== */ + +.stats-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(260px, 1fr)); + gap: 1.5rem; + margin-bottom: 3rem; +} + +.stat-card { + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + border-radius: var(--radius); + padding: 1.5rem; + display: flex; + align-items: center; + justify-content: space-between; + gap: 1rem; + transition: transform 0.20s var(--transition), + box-shadow 0.20s var(--transition); +} + +.stat-card:hover { + transform: translateY(-4px); + box-shadow: var(--shadow-glow-pink); +} + +.stat-icon { + font-size: 2rem; + opacity: 0.85; +} + +.stat-content .stat-value { + font-size: 2rem; + font-weight: 700; +} + +.stat-content .stat-label { + color: var(--color-text-secondary); + font-size: 0.95rem; +} + +.stat-actions { + display: flex; + flex-direction: column; + gap: 0.5rem; + align-items: flex-end; +} + +.stat-link { + font-size: 0.85rem; + color: var(--color-brand-hover); + text-decoration: none; +} + +.stat-link:hover { + text-decoration: underline; +} + + +/* ================================ + * RESPONSIVE BREAKPOINTS + * =================================== */ + +/* --- Large screens under 1600px --- */ +@media (max-width: 1600px) { + .side-panel { + width: 180px; + } +} + +/* --- Hide side panels under 900px --- */ +@media (max-width: 900px) { + .side-panel { + display: none; + } + .main-wrapper { + padding: 0 0.5rem; + } + .logo-img { + height: 36px; + } +} + +/* --- Mobile adjustments (≤ 600px) --- */ +@media (max-width: 600px) { + .nav-links { + gap: 1rem; + } + + .hero-section { + padding: 2.5rem 1.5rem; + } + + .hero-title { + font-size: 2.4rem; + } + + .stats-grid { + grid-template-columns: 1fr; + } +} diff --git a/internal/web/static/css/pages.css b/internal/web/static/css/pages.css new file mode 100644 index 0000000..cd6853f --- /dev/null +++ b/internal/web/static/css/pages.css @@ -0,0 +1,271 @@ +/* + * GOONDEX — PAGE-SPECIFIC STYLES + * Performer pages, scene pages, studios, tables, lists, galleries. + * Fully aligned with dark-only theme + Flamingo Pink neon. + */ + +/* ============================================ + * GENERIC PAGE WRAPPER + * ============================================ */ +.page { + padding: 3rem 0; +} + +.page-header { + margin-bottom: 2rem; +} + +/* Title */ +.page-title { + font-size: 2.6rem; + font-weight: 800; + line-height: 1.2; + + background: linear-gradient(135deg, var(--color-brand), var(--color-header)); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + + margin-bottom: 0.75rem; +} + +.page-subtitle { + font-size: 1.1rem; + color: var(--color-text-secondary); +} + +/* ============================================ + * TABLES + * ============================================ */ +.table { + width: 100%; + border-collapse: collapse; + margin-top: 1.5rem; + border-radius: var(--radius); + overflow: hidden; +} + +.table thead { + background: rgba(255, 79, 163, 0.08); +} + +.table th { + text-align: left; + padding: 0.9rem; + font-weight: 600; + color: var(--color-brand); + border-bottom: 1px solid var(--color-border-soft); +} + +.table td { + padding: 0.85rem; + border-bottom: 1px solid var(--color-border); + color: var(--color-text-primary); +} + +.table tr:hover { + background: rgba(255, 79, 163, 0.05); +} + +/* Small subtle fade */ +.table tr:last-child td { + border-bottom: none; +} + +/* ============================================ + * PERFORMER PAGE + * ============================================ */ +.performer-header { + display: flex; + gap: 2.2rem; + align-items: flex-start; + margin-bottom: 3rem; +} + +.performer-photo { + width: 220px; + height: 220px; + border-radius: var(--radius-soft); + object-fit: cover; + + border: 2px solid rgba(255, 79, 163, 0.35); + box-shadow: var(--shadow-glow-pink-soft); +} + +.performer-meta { + flex: 1; +} + +.performer-name { + font-size: 2.4rem; + font-weight: 700; + + background: linear-gradient(135deg, var(--color-brand), var(--color-header)); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +.performer-bio { + margin-top: 1rem; + color: var(--color-text-secondary); + line-height: 1.7; +} + +/* Performer tags */ +.performer-tags { + margin-top: 1.2rem; +} + +/* ============================================ + * SCENE PAGE + * ============================================ */ +.scene-header { + margin-bottom: 3rem; +} + +.scene-title { + font-size: 2.4rem; + font-weight: 700; + line-height: 1.2; + margin-bottom: 0.75rem; + + background: linear-gradient(135deg, var(--color-brand), var(--color-header)); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +.scene-meta { + display: flex; + flex-wrap: wrap; + gap: 1.2rem; + color: var(--color-text-secondary); +} + +/* Video preview image */ +.scene-cover { + width: 100%; + max-height: 480px; + border-radius: var(--radius-soft); + object-fit: cover; + + border: 2px solid rgba(255, 79, 163, 0.32); + box-shadow: var(--shadow-glow-pink-soft); + margin-bottom: 2.2rem; +} + +/* ============================================ + * STUDIO PAGE + * ============================================ */ +.studio-header { + margin-bottom: 3rem; +} + +.studio-name { + font-size: 2.6rem; + font-weight: 800; + margin-bottom: 0.75rem; + + background: linear-gradient(135deg, var(--color-brand), var(--color-header)); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +.studio-description { + max-width: 700px; + color: var(--color-text-secondary); + line-height: 1.7; +} + +/* ============================================ + * GALLERY — GRID OF IMAGES + * ============================================ */ +.gallery { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(170px, 1fr)); + gap: 1rem; + margin-top: 2rem; +} + +.gallery img { + width: 100%; + height: 160px; + object-fit: cover; + border-radius: var(--radius); + transition: transform var(--transition), box-shadow var(--transition); + + border: 1px solid rgba(255, 79, 163, 0.3); + box-shadow: var(--shadow-elevated); +} + +.gallery img:hover { + transform: scale(1.03); + box-shadow: var(--shadow-glow-pink); +} + +/* ============================================ + * PAGINATION + * ============================================ */ +.pagination { + margin-top: 2.5rem; + display: flex; + justify-content: center; + gap: 0.6rem; +} + +.page-btn { + padding: 0.55rem 1rem; + border-radius: var(--radius); + background: var(--color-bg-card); + border: 1px solid var(--color-border-soft); + cursor: pointer; + color: var(--color-text-primary); + transition: background var(--transition), box-shadow var(--transition); +} + +.page-btn:hover { + background: rgba(255, 79, 163, 0.15); + box-shadow: var(--shadow-glow-pink-soft); +} + +.page-btn.active { + background: rgba(255, 79, 163, 0.25); + border-color: var(--color-brand); +} + +/* ============================================ + * RESPONSIVE BEHAVIOUR + * ============================================ */ +@media (max-width: 920px) { + .performer-header { + flex-direction: column; + align-items: center; + text-align: center; + } + + .performer-photo { + width: 200px; + height: 200px; + } +} + +@media (max-width: 720px) { + .page-title { + font-size: 2rem; + } + + .scene-title, + .studio-name, + .performer-name { + font-size: 2rem; + } +} + +@media (max-width: 540px) { + .gallery { + grid-template-columns: repeat(auto-fill, minmax(130px, 1fr)); + } + + .performer-photo { + width: 170px; + height: 170px; + } +} diff --git a/internal/web/static/css/style.css b/internal/web/static/css/style.css new file mode 100644 index 0000000..6354268 --- /dev/null +++ b/internal/web/static/css/style.css @@ -0,0 +1,1232 @@ +/* + * Goondex Web UI - Dark Mode with Flamingo Pulse Pink Branding + * Color Palette from v0.3.5-r1: + * - Brand/Accent: #FF4FA3 (Flamingo Pulse Pink) + * - Text Primary: #F8F8F8 (Soft White) + * - Secondary Text: #9BA0A8 (Muted Grey) + * - Section Headers: #D78BE0 (Lilac Tint) + * - Data Keypoints: #FF66C4 (Hot Pink) + * - Warnings: #FFAA88 (Peach Warning) + * - Info/Dates: #7EE7E7 (Cool Cyan) + */ + +/* Reset and base styles */ +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +/* Import GX Components */ +@import url("/static/css/gx/GX_Button.css"); +@import url("/static/css/gx/GX_Checkbox.css"); +@import url("/static/css/gx/GX_Input.css"); +@import url("/static/css/gx/GX_Loader.css"); + +:root { + --color-brand: #FF4FA3; + --color-brand-hover: #FF66C4; + --color-text-primary: #F8F8F8; + --color-text-secondary: #9BA0A8; + --color-header: #D78BE0; + --color-keypoint: #FF66C4; + --color-warning: #FFAA88; + --color-info: #7EE7E7; + --color-bg-dark: #09090b; + --color-bg-card: #18181b; + --color-bg-elevated: #27272a; + --color-border: #3f3f46; +} + +body { + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif; + line-height: 1.6; + color: var(--color-text-primary); + background: var(--color-bg-dark); +} + +/* Navbar */ +.navbar { + background: var(--color-bg-card); + color: white; + padding: 1rem 0; + box-shadow: 0 2px 8px rgba(255, 79, 163, 0.1); + border-bottom: 1px solid var(--color-brand); +} + +.navbar .container { + display: flex; + justify-content: space-between; + align-items: center; +} + +.logo { + font-size: 1.5rem; + font-weight: bold; + background: linear-gradient(135deg, var(--color-brand) 0%, var(--color-header) 100%); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; +} + +.nav-links { + list-style: none; + display: flex; + gap: 2rem; +} + +.nav-links a { + color: var(--color-text-secondary); + text-decoration: none; + transition: color 0.3s; + font-weight: 500; +} + +.nav-links a:hover, +.nav-links a.active { + color: var(--color-brand); +} + +/* Container */ +.container { + max-width: 1400px; + margin: 0 auto; + padding: 0 1.5rem; +} + +main.container { + padding-top: 2rem; + padding-bottom: 4rem; +} + +/* Hero Section */ +.hero-section { + background: linear-gradient(135deg, rgba(255, 79, 163, 0.1) 0%, rgba(215, 139, 224, 0.05) 100%); + border: 1px solid rgba(255, 79, 163, 0.2); + border-radius: 20px; + padding: 4rem 3rem; + margin-bottom: 3rem; + text-align: center; + box-shadow: 0 8px 32px rgba(255, 79, 163, 0.15); + position: relative; + overflow: hidden; +} + +.hero-section::before { + content: ""; + position: absolute; + top: -50%; + right: -50%; + width: 200%; + height: 200%; + background: radial-gradient(circle, rgba(255, 79, 163, 0.08) 0%, transparent 70%); + animation: pulse-glow 8s ease-in-out infinite; +} + +@keyframes pulse-glow { + 0%, 100% { opacity: 0.3; transform: scale(1); } + 50% { opacity: 0.6; transform: scale(1.1); } +} + +.hero-content { + position: relative; + z-index: 1; +} + +.hero-title { + font-size: 3.5rem; + font-weight: 800; + background: linear-gradient(135deg, var(--color-brand) 0%, var(--color-header) 100%); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; + margin-bottom: 1rem; +} + +.hero-subtitle { + font-size: 1.25rem; + color: var(--color-text-secondary); + margin-bottom: 2rem; + max-width: 600px; + margin-left: auto; + margin-right: auto; +} + +.hero-actions { + display: flex; + gap: 1rem; + justify-content: center; + flex-wrap: wrap; +} + +/* Page header */ +.page-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 2rem; + flex-wrap: wrap; + gap: 1rem; +} + +.page-header h2 { + font-size: 2rem; + color: var(--color-header); +} + +/* Action buttons */ +.action-buttons { + display: flex; + gap: 0.5rem; +} + +/* ================================ + GX Button Styles (Premium Animated) + ================================ */ +.btn, .button { + display: inline-flex; + align-items: center; + justify-content: center; + padding: 15px 30px; + border: 0; + position: relative; + overflow: hidden; + border-radius: 10rem; + transition: all 0.02s; + font-weight: bold; + cursor: pointer; + background: #1e1e1e; + color: #ffb3d1; + z-index: 0; + box-shadow: 0 0px 7px -5px rgba(0, 0, 0, 0.5); + text-decoration: none; +} + +.btn:hover, .button:hover { + background: #ff5fa2; + color: #1e1e1e; +} + +.btn:active, .button:active { + transform: scale(0.97); +} + +/* Glow layer container */ +.hoverEffect { + position: absolute; + bottom: 0; + top: 0; + left: 0; + right: 0; + display: flex; + align-items: center; + justify-content: center; + z-index: 1; + pointer-events: none; +} + +/* Rotating neon blob */ +.hoverEffect div { + background: linear-gradient(90deg, #ff5fa2 0%, #ff9bcb 50%, #8c2f5c 100%); + border-radius: 40rem; + width: 10rem; + height: 10rem; + transition: 0.4s; + filter: blur(25px); + animation: effect 3s linear infinite; + opacity: 0.55; +} + +.btn:hover .hoverEffect div, +.button:hover .hoverEffect div { + width: 8rem; + height: 8rem; +} + +@keyframes effect { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} + +.btn-secondary { + background: var(--color-bg-elevated); + border: 2px solid var(--color-brand); + color: var(--color-brand); + box-shadow: none; +} + +.btn-secondary:hover { + background: rgba(255, 79, 163, 0.1); + border-color: var(--color-keypoint); + color: var(--color-keypoint); +} + +/* Search form */ +.search-form { + display: flex; + gap: 0.5rem; +} + +/* ================================ + GX Input Styles (Premium Animated) + ================================ */ +.search-form input, input.input, input[type="text"], input[type="search"] { + border-radius: 10px; + outline: 2px solid #ff5fa2; + border: 0; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, Cantarell, "Open Sans", "Helvetica Neue", sans-serif; + background-color: #ffb3d1; + outline-offset: 3px; + padding: 10px 1rem; + transition: 0.25s; + color: #1e1e1e; + width: 300px; +} + +.search-form input:focus, input.input:focus, input[type="text"]:focus, input[type="search"]:focus { + outline-offset: 5px; + background-color: #ffffff; + color: #1e1e1e; +} + +.search-form input::placeholder, input.input::placeholder { + color: #8c2f5c; + opacity: 0.7; +} + +.search-form button { + padding: 0.6rem 1.5rem; + background: linear-gradient(135deg, var(--color-brand) 0%, var(--color-keypoint) 100%); + color: white; + border: none; + border-radius: 6px; + cursor: pointer; + transition: all 0.3s; + font-weight: 500; +} + +.search-form button:hover { + background: linear-gradient(135deg, var(--color-keypoint) 0%, var(--color-brand) 100%); + box-shadow: 0 2px 8px rgba(255, 79, 163, 0.3); +} + +/* Stats grid */ +.stats-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); + gap: 1.5rem; + margin-bottom: 3rem; +} + +.stat-card { + background: var(--color-bg-card); + padding: 1.5rem; + border-radius: 12px; + border: 1px solid var(--color-bg-elevated); + display: flex; + flex-direction: column; + gap: 1rem; + transition: all 0.3s; +} + +.stat-card:hover { + border-color: var(--color-brand); + transform: translateY(-2px); + box-shadow: 0 4px 16px rgba(255, 79, 163, 0.2); +} + +.stat-icon { + font-size: 2.5rem; +} + +.stat-content { + flex: 1; +} + +.stat-value { + font-size: 2.5rem; + font-weight: bold; + background: linear-gradient(135deg, var(--color-brand) 0%, var(--color-keypoint) 100%); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; +} + +.stat-label { + color: var(--color-text-secondary); + font-size: 1rem; +} + +.stat-link { + color: var(--color-brand); + text-decoration: none; + font-weight: 500; +} + +.stat-link:hover { + color: var(--color-keypoint); + text-decoration: underline; +} + +/* Quick actions */ +.quick-actions { + background: var(--color-bg-card); + padding: 1.5rem; + border-radius: 12px; + border: 1px solid var(--color-bg-elevated); +} + +.quick-actions h3 { + margin-bottom: 1rem; + color: var(--color-header); +} + +.help-text { + color: var(--color-text-secondary); + margin-bottom: 1rem; +} + +.code-block { + background: var(--color-bg-dark); + border: 1px solid var(--color-bg-elevated); + padding: 1rem; + border-radius: 6px; + font-family: 'Courier New', monospace; +} + +.code-block code { + color: var(--color-brand); + display: block; + margin: 0.25rem 0; +} + +/* Table */ +.table-container { + background: var(--color-bg-card); + border-radius: 12px; + border: 1px solid var(--color-bg-elevated); + overflow: hidden; +} + +.data-table { + width: 100%; + border-collapse: collapse; +} + +.data-table thead { + background: var(--color-bg-elevated); + color: white; +} + +.data-table th { + padding: 1rem; + text-align: left; + font-weight: 600; + color: var(--color-header); +} + +.data-table td { + padding: 1rem; + border-top: 1px solid var(--color-bg-elevated); + color: var(--color-text-primary); +} + +.data-table tbody tr { + transition: background 0.2s; +} + +.data-table tbody tr:hover { + background: var(--color-bg-elevated); +} + +.name-cell { + font-weight: 500; + color: var(--color-text-primary); +} + +.description-cell { + max-width: 300px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + color: var(--color-text-secondary); +} + +.btn-small { + padding: 10px 20px; + border: 0; + position: relative; + overflow: hidden; + border-radius: 10rem; + transition: all 0.02s; + font-weight: bold; + cursor: pointer; + background: #1e1e1e; + color: #ffb3d1; + z-index: 0; + box-shadow: 0 0px 7px -5px rgba(0, 0, 0, 0.5); + text-decoration: none; + display: inline-flex; + align-items: center; + justify-content: center; + font-size: 0.9rem; +} + +.btn-small:hover { + background: #ff5fa2; + color: #1e1e1e; +} + +.btn-small:active { + transform: scale(0.97); +} + +/* Empty state */ +.empty-state { + background: var(--color-bg-card); + border: 1px solid var(--color-bg-elevated); + padding: 3rem; + border-radius: 12px; + text-align: center; +} + +.empty-state p { + color: var(--color-text-secondary); + margin-bottom: 1rem; +} + +.empty-state code { + background: var(--color-bg-elevated); + padding: 0.25rem 0.5rem; + border-radius: 3px; + font-family: 'Courier New', monospace; + color: var(--color-brand); +} + +/* Detail views */ +.breadcrumb { + margin-bottom: 1.5rem; +} + +.breadcrumb a { + color: var(--color-brand); + text-decoration: none; +} + +.breadcrumb a:hover { + color: var(--color-keypoint); + text-decoration: underline; +} + +.detail-container { + background: var(--color-bg-card); + border: 1px solid var(--color-bg-elevated); + padding: 2rem; + border-radius: 12px; +} + +.detail-header { + display: flex; + justify-content: space-between; + align-items: start; + margin-bottom: 2rem; + padding-bottom: 1.5rem; + border-bottom: 2px solid var(--color-bg-elevated); +} + +.detail-header h2 { + color: var(--color-text-primary); + margin-bottom: 0.5rem; +} + +.aliases { + color: var(--color-text-secondary); + font-style: italic; +} + +.badge { + background: linear-gradient(135deg, var(--color-brand) 0%, var(--color-keypoint) 100%); + color: white; + padding: 0.5rem 1rem; + border-radius: 6px; + font-size: 0.9rem; + box-shadow: 0 2px 6px rgba(255, 79, 163, 0.3); +} + +.detail-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); + gap: 1.5rem; + margin-bottom: 2rem; +} + +.detail-section { + background: var(--color-bg-elevated); + padding: 1.5rem; + border-radius: 8px; + border: 1px solid var(--color-border); +} + +.detail-section h3 { + color: var(--color-header); + margin-bottom: 1rem; + font-size: 1.1rem; +} + +.detail-row { + display: flex; + justify-content: space-between; + padding: 0.5rem 0; + border-bottom: 1px solid var(--color-border); +} + +.detail-row:last-child { + border-bottom: none; +} + +.detail-row .label { + color: var(--color-text-secondary); + font-weight: 500; +} + +.detail-row .value { + color: var(--color-text-primary); + text-align: right; +} + +.detail-row .value a { + color: var(--color-brand); + text-decoration: none; +} + +.detail-row .value a:hover { + color: var(--color-keypoint); + text-decoration: underline; +} + +.full-width { + grid-column: 1 / -1; +} + +.bio-text { + color: var(--color-text-primary); + line-height: 1.8; + white-space: pre-wrap; +} + +/* Images */ +.image-gallery { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(200px, 1fr)); + gap: 1rem; + margin-top: 1rem; +} + +.image-gallery img { + width: 100%; + height: 250px; + object-fit: cover; + border-radius: 8px; + border: 2px solid var(--color-border); + transition: all 0.3s; +} + +.image-gallery img:hover { + transform: scale(1.05); + border-color: var(--color-brand); + box-shadow: 0 4px 12px rgba(255, 79, 163, 0.3); +} + +.profile-image { + max-width: 400px; + margin: 0 auto 2rem; + text-align: center; +} + +.profile-image img { + width: 100%; + border-radius: 12px; + border: 2px solid var(--color-border); + transition: border-color 0.3s; +} + +.profile-image img:hover { + border-color: var(--color-brand); +} + +.studio-logo, +.scene-poster { + margin-bottom: 2rem; + text-align: center; +} + +.studio-logo img { + max-width: 400px; + max-height: 200px; + object-fit: contain; + background: var(--color-bg-elevated); + padding: 1rem; + border-radius: 8px; + border: 1px solid var(--color-border); +} + +.scene-poster img { + max-width: 100%; + max-height: 500px; + object-fit: contain; + border-radius: 12px; + border: 2px solid var(--color-border); +} + +/* Item lists */ +.item-list { + list-style: none; +} + +.item-list li { + padding: 0.5rem 0; + border-bottom: 1px solid var(--color-border); +} + +.item-list li:last-child { + border-bottom: none; +} + +.item-list a { + color: var(--color-brand); + text-decoration: none; +} + +.item-list a:hover { + color: var(--color-keypoint); + text-decoration: underline; +} + +/* Tag list */ +.tag-list { + display: flex; + flex-wrap: wrap; + gap: 0.5rem; +} + +.tag { + background: linear-gradient(135deg, var(--color-brand) 0%, var(--color-keypoint) 100%); + color: white; + padding: 0.4rem 0.8rem; + border-radius: 16px; + font-size: 0.85rem; + box-shadow: 0 2px 4px rgba(255, 79, 163, 0.2); +} + +/* Modal */ +.modal { + display: none; + position: fixed; + z-index: 1000; + left: 0; + top: 0; + width: 100%; + height: 100%; + overflow: auto; + background-color: rgba(0,0,0,0.9); +} + +.modal.active { + display: block; +} + +.modal-content { + background-color: var(--color-bg-card); + border: 1px solid var(--color-brand); + margin: 5% auto; + padding: 2rem; + width: 90%; + max-width: 600px; + border-radius: 12px; + box-shadow: 0 4px 24px rgba(255, 79, 163, 0.3); +} + +.modal-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 1.5rem; + border-bottom: 2px solid var(--color-bg-elevated); + padding-bottom: 1rem; +} + +.modal-header h3 { + color: var(--color-header); +} + +.close { + color: var(--color-text-secondary); + font-size: 2rem; + font-weight: bold; + cursor: pointer; + border: none; + background: none; + transition: color 0.3s; +} + +.close:hover { + color: var(--color-brand); +} + +.form-group { + margin-bottom: 1.5rem; +} + +.form-group label { + display: block; + margin-bottom: 0.5rem; + color: var(--color-text-secondary); + font-weight: 500; +} + +.form-group input, +.form-group select { + width: 100%; + border-radius: 10px; + outline: 2px solid #ff5fa2; + border: 0; + background-color: #ffb3d1; + outline-offset: 3px; + padding: 10px 1rem; + transition: 0.25s; + color: #1e1e1e; + font-weight: 500; +} + +.form-group input:focus, +.form-group select:focus { + outline-offset: 5px; + background-color: #ffffff; + color: #1e1e1e; +} + +.form-group input::placeholder { + color: #8c2f5c; + opacity: 0.7; +} + +.progress-bar { + width: 100%; + height: 8px; + background: var(--color-bg-elevated); + border-radius: 4px; + overflow: hidden; + margin: 1rem 0; +} + +.progress-fill { + height: 100%; + background: linear-gradient(90deg, var(--color-brand) 0%, var(--color-keypoint) 100%); + transition: width 0.3s; +} + +.result-message { + padding: 1rem; + border-radius: 6px; + margin: 1rem 0; +} + +.result-message.success { + background: rgba(126, 231, 231, 0.1); + border: 1px solid var(--color-info); + color: var(--color-info); +} + +.result-message.error { + background: rgba(255, 170, 136, 0.1); + border: 1px solid var(--color-warning); + color: var(--color-warning); +} + +/* Responsive */ +@media (max-width: 768px) { + .page-header { + flex-direction: column; + align-items: flex-start; + } + + .search-form { + width: 100%; + } + + .search-form input { + width: 100%; + } + + .detail-header { + flex-direction: column; + gap: 1rem; + } + + .data-table { + font-size: 0.9rem; + } + + .data-table th, + .data-table td { + padding: 0.75rem; + } + + .description-cell { + max-width: 150px; + } + + .modal-content { + width: 95%; + margin: 10% auto; + } +} + +/* Scrollbar */ +::-webkit-scrollbar { + width: 12px; +} + +::-webkit-scrollbar-track { + background: var(--color-bg-dark); +} + +::-webkit-scrollbar-thumb { + background: var(--color-brand); + border-radius: 6px; +} + +::-webkit-scrollbar-thumb:hover { + background: var(--color-keypoint); +} + +/* Selection */ +::selection { + background: var(--color-brand); + color: white; +} + +/* Progress Bar */ +.progress-bar-wrapper { + padding: 1.5rem; +} + +.progress-bar { + width: 100%; + height: 30px; + background: var(--color-bg-elevated); + border-radius: 15px; + overflow: hidden; + border: 1px solid var(--color-border); + box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.3); +} + +.progress-fill { + height: 100%; + background: linear-gradient(135deg, var(--color-brand) 0%, var(--color-keypoint) 100%); + transition: width 0.3s ease; + box-shadow: 0 0 10px rgba(255, 79, 163, 0.5); + border-radius: 15px; +} + +.progress-text { + margin-top: 1rem; + text-align: center; + color: var(--color-text-primary); + font-size: 1rem; + font-weight: 500; +} + +/* ================================ + GX Checkbox (Premium Animated) + ================================ */ +.container input[type="checkbox"] { + position: absolute; + opacity: 0; + cursor: pointer; + height: 0; + width: 0; +} + +.container { + display: inline-block; + position: relative; + cursor: pointer; + font-size: 26px; + user-select: none; +} + +.checkmark { + position: relative; + width: 1.35em; + height: 1.35em; + background: #1E1E1E; + border-radius: 50%; + box-shadow: 0 0 0 2px #8C2F5C; + transition: background 0.4s ease, box-shadow 0.4s ease, transform 0.25s ease; +} + +.container:hover .checkmark { + box-shadow: 0 0 10px 2px rgba(255,95,162,0.25); + transform: scale(1.03); +} + +.container input[type="checkbox"]:checked ~ .checkmark { + background: radial-gradient(circle at 40% 40%, #FF9BCB 0%, #FF5FA2 40%, #8C2F5C 100%); + box-shadow: + 0 0 10px 5px rgba(255,95,162,0.35), + 0 0 20px 10px rgba(255,155,203,0.25); + animation: goondex-pulse 1.6s ease-out forwards; + transform: scale(1.08); +} + +.checkmark:after { + content: ""; + position: absolute; + border: solid rgba(233,233,233,0.28); + border-width: 0 0.18em 0.18em 0; + width: 0.30em; + height: 0.60em; + left: 0.47em; + top: 0.22em; + transform: rotate(45deg) scale(0.75); + opacity: 0.35; + transition: opacity 0.25s ease, transform 0.25s ease, border-color 0.25s ease; +} + +.container input[type="checkbox"]:checked ~ .checkmark:after { + border-color: #E9E9E9; + opacity: 1; + transform: rotate(45deg) scale(1); +} + +@keyframes goondex-pulse { + 0% { + box-shadow: + 0 0 0 0 rgba(255,95,162,0.5), + 0 0 0 0 rgba(255,155,203,0.4); + } + 40% { + box-shadow: + 0 0 20px 10px rgba(255,95,162,0.45), + 0 0 35px 20px rgba(255,155,203,0.35); + } + 100% { + box-shadow: + 0 0 10px 5px rgba(255,95,162,0.35), + 0 0 20px 10px rgba(255,155,203,0.25); + } +} + +/* ================================ + GX Loader (Premium Neon Spinner) + ================================ */ +.spinner { + width: 100px; + height: 100px; + border-radius: 50%; + background: conic-gradient( + from 0deg, + #FF5FA2 0%, + #FF77B4 20%, + #FF9BCB 40%, + #D96CA0 60%, + #8C2F5C 80%, + #FF5FA2 100% + ); + animation: spinning82341 1.7s linear infinite; + box-shadow: + 0 0 12px rgba(255,95,162,0.35), + 0 0 25px rgba(255,155,203,0.25), + 0 0 35px rgba(140,47,92,0.25); + filter: blur(1.5px); + position: relative; + display: inline-block; + margin: 2rem auto; +} + +.spinner1 { + width: 100px; + height: 100px; + border-radius: 50%; + background-color: #1E1E1E; + filter: blur(8px); + position: absolute; + top: 0; + left: 0; +} + +@keyframes spinning82341 { + to { + transform: rotate(360deg); + } +} + +/* ================================ + Scene Grid on Performer Pages + ================================ */ +.scenes-section { + margin-top: 3rem; +} + +.section-header { + margin-bottom: 2rem; +} + +.section-header h2 { + font-size: 2rem; + color: var(--color-header); + background: linear-gradient(135deg, var(--color-brand) 0%, var(--color-header) 100%); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; +} + +.scene-grid { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(280px, 1fr)); + gap: 1.5rem; +} + +.scene-card { + background: var(--color-bg-card); + border: 1px solid var(--color-border); + border-radius: 12px; + overflow: hidden; + transition: all 0.3s ease; + text-decoration: none; + color: inherit; + display: block; + box-shadow: 0 2px 8px rgba(255, 79, 163, 0.1); +} + +.scene-card:hover { + transform: translateY(-4px); + border-color: var(--color-brand); + box-shadow: 0 8px 24px rgba(255, 79, 163, 0.25); +} + +.scene-thumbnail { + width: 100%; + aspect-ratio: 16 / 9; + background: var(--color-bg-elevated); + position: relative; + overflow: hidden; +} + +.scene-thumbnail img { + width: 100%; + height: 100%; + object-fit: cover; + transition: transform 0.3s ease; +} + +.scene-card:hover .scene-thumbnail img { + transform: scale(1.05); +} + +.scene-thumbnail.no-image { + display: flex; + align-items: center; + justify-content: center; + background: linear-gradient(135deg, rgba(255, 79, 163, 0.1) 0%, rgba(215, 139, 224, 0.05) 100%); +} + +.no-image-text { + font-size: 3rem; + opacity: 0.3; +} + +.scene-info { + padding: 1rem; +} + +.scene-title { + font-size: 1rem; + font-weight: 600; + color: var(--color-text-primary); + margin: 0 0 0.5rem 0; + line-height: 1.4; + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; +} + +.scene-date, +.scene-code { + font-size: 0.85rem; + color: var(--color-text-secondary); + margin: 0.25rem 0; +} + +.scene-date { + color: var(--color-info); +} + +.scene-code { + color: var(--color-keypoint); +} + +/* ================================ + Image Lightbox + ================================ */ +.lightbox { + display: none; + position: fixed; + z-index: 9999; + left: 0; + top: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.95); + align-items: center; + justify-content: center; + cursor: pointer; +} + +.lightbox-content { + max-width: 90%; + max-height: 90%; + object-fit: contain; + animation: lightboxZoom 0.3s ease; +} + +@keyframes lightboxZoom { + from { + transform: scale(0.7); + opacity: 0; + } + to { + transform: scale(1); + opacity: 1; + } +} + +.lightbox-close { + position: absolute; + top: 2rem; + right: 3rem; + font-size: 3rem; + font-weight: bold; + color: var(--color-text-primary); + cursor: pointer; + transition: color 0.2s; +} + +.lightbox-close:hover { + color: var(--color-brand); +} + +.profile-image { + cursor: pointer; + transition: transform 0.2s; +} + +.profile-image:hover { + transform: scale(1.02); +} diff --git a/internal/web/static/css/theme.css b/internal/web/static/css/theme.css new file mode 100644 index 0000000..df4fc66 --- /dev/null +++ b/internal/web/static/css/theme.css @@ -0,0 +1,126 @@ +/* + * GOONDEX THEME / VARIABLES / RESET + * Updated for: Dark mode only + Medium Flamingo Pink neon accents + */ + +/* =========================== + * VARIABLES + * =========================== */ +:root { + /* --- BRAND IDENTITY --- */ + --color-brand: #FF4FA3; /* Flamingo Pink (core) */ + --color-brand-hover: #FF6AB7; /* Slightly brighter pink */ + --color-brand-glow: rgba(255, 79, 163, 0.35); /* SUBTLE neon glow */ + + /* --- TEXT --- */ + --color-text-primary: #F5F5F7; + --color-text-secondary: #A0A3AB; + --color-header: #E08FEA; + --color-keypoint: #FF6ACB; + + /* --- ALERTS --- */ + --color-warning: #FFAA88; + --color-info: #7EE7E7; + + /* --- BACKGROUND LAYERS (dark only) --- */ + --color-bg-dark: #0A0A0C; + --color-bg-card: #151517; + --color-bg-elevated: #212124; + + /* --- BORDERS --- */ + --color-border: #3d3d44; + --color-border-soft: rgba(255, 79, 163, 0.15); /* Flamingo soft border */ + + /* --- RADII --- */ + --radius: 12px; + --radius-soft: 20px; + + /* --- MOTION --- */ + --transition-fast: 0.15s ease; + --transition: 0.25s ease; + + /* --- UI GRID --- */ + --rail-width: 180px; + + /* --- GLOWS + SHADOWS (medium intensity only) --- */ + --shadow-glow-pink: 0 0 18px rgba(255, 79, 163, 0.28); + --shadow-glow-pink-soft: 0 0 38px rgba(255, 79, 163, 0.14); + --shadow-elevated: 0 6px 22px rgba(0, 0, 0, 0.6); +} + +/* =========================== + * RESET + * =========================== */ +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +/* =========================== + * BASE + * =========================== */ +body { + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif; + line-height: 1.6; + color: var(--color-text-primary); + background: var(--color-bg-dark); + overflow-x: hidden; +} + +/* =========================== + * SCROLLBARS (dark + pink accent) + * =========================== */ +::-webkit-scrollbar { + width: 12px; +} + +::-webkit-scrollbar-track { + background: var(--color-bg-dark); +} + +::-webkit-scrollbar-thumb { + background: var(--color-brand); + border-radius: 6px; + box-shadow: var(--shadow-glow-pink-soft); +} + +::-webkit-scrollbar-thumb:hover { + background: var(--color-brand-hover); + box-shadow: var(--shadow-glow-pink); +} + +/* =========================== + * TEXT SELECTION + * =========================== */ +::selection { + background: var(--color-brand); + color: white; +} + +/* =========================== + * UTILITY CLASSES (for GX + layouts) + * =========================== */ + +/* Subtle glowing border */ +.glow-border { + border: 1px solid var(--color-border-soft); + box-shadow: var(--shadow-glow-pink-soft); +} + +/* Card elevation */ +.elevated { + background: var(--color-bg-elevated); + box-shadow: var(--shadow-elevated); +} + +/* Brand glow text (subtle) */ +.text-glow { + text-shadow: 0 0 12px var(--color-brand-glow); +} + +/* Pink glow panel (subtle accent for navbar or hero) */ +.panel-glow { + box-shadow: inset 0 0 60px rgba(255, 79, 163, 0.08), + 0 0 22px rgba(255, 79, 163, 0.20); +} diff --git a/internal/web/static/img/logo/GOONDEX_logo.png b/internal/web/static/img/logo/GOONDEX_logo.png new file mode 100644 index 0000000..c9b1a40 Binary files /dev/null and b/internal/web/static/img/logo/GOONDEX_logo.png differ diff --git a/internal/web/static/img/logo/GOONDEX_logo.svg b/internal/web/static/img/logo/GOONDEX_logo.svg new file mode 100644 index 0000000..1bf3bb8 --- /dev/null +++ b/internal/web/static/img/logo/GOONDEX_logo.svg @@ -0,0 +1,86 @@ + + + + + + + + + + + + + + + + + + + diff --git a/internal/web/static/img/logo/GOONDEX_logo_dark.png b/internal/web/static/img/logo/GOONDEX_logo_dark.png new file mode 100644 index 0000000..a286bba Binary files /dev/null and b/internal/web/static/img/logo/GOONDEX_logo_dark.png differ diff --git a/internal/web/static/img/logo/GOONDEX_logo_light.png b/internal/web/static/img/logo/GOONDEX_logo_light.png new file mode 100644 index 0000000..f29d3a1 Binary files /dev/null and b/internal/web/static/img/logo/GOONDEX_logo_light.png differ diff --git a/internal/web/static/img/logo/Team_GoonLOGO.png b/internal/web/static/img/logo/Team_GoonLOGO.png new file mode 100644 index 0000000..70b87e3 Binary files /dev/null and b/internal/web/static/img/logo/Team_GoonLOGO.png differ diff --git a/internal/web/static/js/app.js b/internal/web/static/js/app.js new file mode 100644 index 0000000..80cf420 --- /dev/null +++ b/internal/web/static/js/app.js @@ -0,0 +1,477 @@ +// Goondex Web UI JavaScript + +// Modal handling +function openModal(modalId) { + const modal = document.getElementById(modalId); + if (modal) { + modal.classList.add('active'); + } +} + +function closeModal(modalId) { + const modal = document.getElementById(modalId); + if (modal) { + modal.classList.remove('active'); + } +} + +// Import functions +// Global Search +let searchTimeout; +document.addEventListener('DOMContentLoaded', function() { + const searchInput = document.getElementById('global-search'); + if (searchInput) { + searchInput.addEventListener('input', function() { + clearTimeout(searchTimeout); + const query = this.value.trim(); + + if (query.length < 2) { + document.getElementById('global-search-results').style.display = 'none'; + return; + } + + searchTimeout = setTimeout(() => globalSearch(query), 300); + }); + } +}); + +async function globalSearch(query) { + try { + const response = await fetch(`/api/search?q=${encodeURIComponent(query)}`); + const result = await response.json(); + + if (result.success) { + displayGlobalSearchResults(result.data); + } + } catch (error) { + console.error('Search failed:', error); + } +} + +function displayGlobalSearchResults(data) { + const resultsDiv = document.getElementById('global-search-results'); + let html = '
'; + + if (data.total === 0) { + html += '

No results found

'; + } else { + html += `

Found ${data.total} results

`; + + if (data.performers && data.performers.length > 0) { + html += '

Performers

'; + } + + if (data.studios && data.studios.length > 0) { + html += '

Studios

'; + } + + if (data.scenes && data.scenes.length > 0) { + html += '

Scenes

'; + } + + if (data.tags && data.tags.length > 0) { + html += '

Tags

'; + data.tags.slice(0, 10).forEach(t => { + html += `${t.name}`; + }); + html += '
'; + } + } + + html += '
'; + resultsDiv.innerHTML = html; + resultsDiv.style.display = 'block'; +} + +// Bulk Import Functions +async function bulkImportAll() { + if (!confirm('This will import ALL data from TPDB. This may take several hours. Continue?')) { + return; + } + + setImportStatus('import-all', 'Importing all data from TPDB... This may take a while.', false); + + try { + const response = await fetch('/api/import/all', { + method: 'POST' + }); + + const result = await response.json(); + + if (result.success) { + let message = result.message + '\n\n'; + if (result.data) { + result.data.forEach(r => { + message += `${r.EntityType}: ${r.Imported}/${r.Total} imported, ${r.Failed} failed\n`; + }); + } + setImportStatus('import-all', message, true); + setTimeout(() => { + closeModal('import-all-modal'); + location.reload(); + }, 3000); + } else { + setImportStatus('import-all', result.message, false); + } + } catch (error) { + setImportStatus('import-all', 'Error: ' + error.message, false); + } +} + +async function bulkImportPerformers() { + if (!confirm('This will import ALL performers from TPDB. Continue?')) { + return; + } + + // Show progress modal + showProgressModal('performers'); + + // Connect to SSE endpoint + const eventSource = new EventSource('/api/import/all-performers/progress'); + + eventSource.onmessage = function(event) { + const data = JSON.parse(event.data); + + if (data.error) { + updateProgress('performers', 0, 0, data.error, true); + eventSource.close(); + return; + } + + if (data.complete) { + updateProgress('performers', 100, 100, `Complete! Imported ${data.result.Imported}/${data.result.Total} performers`, false); + eventSource.close(); + setTimeout(() => { + closeProgressModal(); + location.reload(); + }, 2000); + } else { + updateProgress('performers', data.current, data.total, data.message, false); + } + }; + + eventSource.onerror = function() { + updateProgress('performers', 0, 0, 'Connection error', true); + eventSource.close(); + }; +} + +async function bulkImportStudios() { + if (!confirm('This will import ALL studios from TPDB. Continue?')) { + return; + } + + // Show progress modal + showProgressModal('studios'); + + // Connect to SSE endpoint + const eventSource = new EventSource('/api/import/all-studios/progress'); + + eventSource.onmessage = function(event) { + const data = JSON.parse(event.data); + + if (data.error) { + updateProgress('studios', 0, 0, data.error, true); + eventSource.close(); + return; + } + + if (data.complete) { + updateProgress('studios', 100, 100, `Complete! Imported ${data.result.Imported}/${data.result.Total} studios`, false); + eventSource.close(); + setTimeout(() => { + closeProgressModal(); + location.reload(); + }, 2000); + } else { + updateProgress('studios', data.current, data.total, data.message, false); + } + }; + + eventSource.onerror = function() { + updateProgress('studios', 0, 0, 'Connection error', true); + eventSource.close(); + }; +} + +async function bulkImportScenes() { + if (!confirm('This will import ALL scenes from TPDB. Continue?')) { + return; + } + + // Show progress modal + showProgressModal('scenes'); + + // Connect to SSE endpoint + const eventSource = new EventSource('/api/import/all-scenes/progress'); + + eventSource.onmessage = function(event) { + const data = JSON.parse(event.data); + + if (data.error) { + updateProgress('scenes', 0, 0, data.error, true); + eventSource.close(); + return; + } + + if (data.complete) { + updateProgress('scenes', 100, 100, `Complete! Imported ${data.result.Imported}/${data.result.Total} scenes`, false); + eventSource.close(); + setTimeout(() => { + closeProgressModal(); + location.reload(); + }, 2000); + } else { + updateProgress('scenes', data.current, data.total, data.message, false); + } + }; + + eventSource.onerror = function() { + updateProgress('scenes', 0, 0, 'Connection error', true); + eventSource.close(); + }; +} + +function toggleFilterbar() { + document.getElementById('filterbar').classList.toggle('open'); +} + +function applyFilters() { + // Hook for your search/filter logic + console.log("Applying filters…"); +} + +function sortTable(columnIndex) { + const table = document.querySelector(".gx-table tbody"); + const rows = Array.from(table.querySelectorAll("tr")); + + const asc = table.getAttribute("data-sort") !== "asc"; + table.setAttribute("data-sort", asc ? "asc" : "desc"); + + rows.sort((a, b) => { + const A = a.children[columnIndex].innerText.trim().toLowerCase(); + const B = b.children[columnIndex].innerText.trim().toLowerCase(); + + if (!isNaN(A) && !isNaN(B)) return asc ? A - B : B - A; + return asc ? A.localeCompare(B) : B.localeCompare(A); + }); + + rows.forEach(r => table.appendChild(r)); +} + +// Search-based Import Functions +async function importPerformer() { + const query = document.getElementById('performer-query').value; + if (!query) { + alert('Please enter a performer name'); + return; + } + + setImportStatus('performer', 'Searching...', false); + + try { + const response = await fetch('/api/import/performer', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query }) + }); + + const result = await response.json(); + + if (result.success) { + setImportStatus('performer', result.message, true); + setTimeout(() => { + closeModal('search-performer-modal'); + location.reload(); + }, 1500); + } else { + setImportStatus('performer', result.message, false); + } + } catch (error) { + setImportStatus('performer', 'Error: ' + error.message, false); + } +} + +async function importStudio() { + const query = document.getElementById('studio-query').value; + if (!query) { + alert('Please enter a studio name'); + return; + } + + setImportStatus('studio', 'Searching...', false); + + try { + const response = await fetch('/api/import/studio', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query }) + }); + + const result = await response.json(); + + if (result.success) { + setImportStatus('studio', result.message, true); + setTimeout(() => { + closeModal('search-studio-modal'); + location.reload(); + }, 1500); + } else { + setImportStatus('studio', result.message, false); + } + } catch (error) { + setImportStatus('studio', 'Error: ' + error.message, false); + } +} + +async function importScene() { + const query = document.getElementById('scene-query').value; + if (!query) { + alert('Please enter a scene title'); + return; + } + + setImportStatus('scene', 'Searching...', false); + + try { + const response = await fetch('/api/import/scene', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query }) + }); + + const result = await response.json(); + + if (result.success) { + setImportStatus('scene', result.message, true); + setTimeout(() => { + closeModal('search-scene-modal'); + location.reload(); + }, 1500); + } else { + setImportStatus('scene', result.message, false); + } + } catch (error) { + setImportStatus('scene', 'Error: ' + error.message, false); + } +} + +async function syncAll() { + const force = document.getElementById('sync-force').checked; + setImportStatus('sync', 'Syncing all data from TPDB...', false); + + try { + const response = await fetch('/api/sync', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ force }) + }); + + const result = await response.json(); + + if (result.success) { + let message = result.message + '\n\n'; + if (result.data) { + result.data.forEach(r => { + message += `${r.EntityType}: ${r.Updated} updated, ${r.Failed} failed\n`; + }); + } + setImportStatus('sync', message, true); + setTimeout(() => { + closeModal('sync-modal'); + location.reload(); + }, 2000); + } else { + setImportStatus('sync', result.message, false); + } + } catch (error) { + setImportStatus('sync', 'Error: ' + error.message, false); + } +} + +function setImportStatus(type, message, success) { + const statusEl = document.getElementById(`${type}-import-status`); + if (statusEl) { + statusEl.textContent = message; + statusEl.className = 'result-message ' + (success ? 'success' : 'error'); + statusEl.style.display = 'block'; + } +} + +// Close modals when clicking outside +window.onclick = function(event) { + if (event.target.classList.contains('modal')) { + event.target.classList.remove('active'); + } +} + +// Image error handling +function handleImageError(img) { + img.style.display = 'none'; +} + +// Progress Modal Functions +function showProgressModal(entityType) { + const modal = document.getElementById('progress-modal'); + if (!modal) { + // Create progress modal if it doesn't exist + const modalHTML = ` + + `; + document.body.insertAdjacentHTML('beforeend', modalHTML); + } else { + modal.classList.add('active'); + } +} + +function closeProgressModal() { + const modal = document.getElementById('progress-modal'); + if (modal) { + modal.classList.remove('active'); + } +} + +function updateProgress(entityType, current, total, message, isError) { + const progressFill = document.getElementById('progress-fill'); + const progressText = document.getElementById('progress-text'); + + if (progressFill && progressText) { + const percent = total > 0 ? (current / total * 100) : 0; + progressFill.style.width = percent + '%'; + progressText.textContent = message; + + if (isError) { + progressFill.style.background = 'var(--color-warning)'; + progressText.style.color = 'var(--color-warning)'; + } else { + progressFill.style.background = 'linear-gradient(135deg, var(--color-brand) 0%, var(--color-keypoint) 100%)'; + progressText.style.color = 'var(--color-text-primary)'; + } + } +} diff --git a/internal/web/templates/dashboard.html b/internal/web/templates/dashboard.html new file mode 100644 index 0000000..9790a81 --- /dev/null +++ b/internal/web/templates/dashboard.html @@ -0,0 +1,259 @@ + + + + + + Goondex - Dashboard + + + + + + + + +
+ + + +
+ + +
+ + + + +
+ + +
+

Welcome to Goondex

+

Your professional adult media indexer powered by ThePornDB

+ +
+ + + +
+
+ + +
+ +
+
+ + +
+ +
+
👤
+
+
{{.PerformerCount}}
+
Performers
+
+
+ View all → + +
+
+ + +
+
🏢
+
+
{{.StudioCount}}
+
Studios
+
+
+ View all → + +
+
+ + +
+
🎬
+
+
{{.SceneCount}}
+
Scenes
+
+
+ View all → + +
+
+ + +
+
🎞️
+
+
{{.MovieCount}}
+
Movies
+
+ +
+
+ + +
+

Import from ThePornDB

+

+ Import ALL data from ThePornDB. This downloads performers, studios, scenes, and metadata. +

+ +
+ + + + + + + +
+ +

Or search for specific items:

+ +
+ + + + + +
+
+ +
+
+ + +
+ + + +
+ + + {{/* Your modals remain exactly as before */}} + + + + diff --git a/internal/web/templates/movie_detail.html b/internal/web/templates/movie_detail.html new file mode 100644 index 0000000..df9b79b --- /dev/null +++ b/internal/web/templates/movie_detail.html @@ -0,0 +1,117 @@ + + + + + + {{.Movie.Title}} - Goondex + + + + + +
+
+
+ {{if .Movie.ImageURL}} + {{.Movie.Title}} + {{else}} +
No Image
+ {{end}} +
+ +
+

{{.Movie.Title}}

+ +
+ {{if .Movie.Date}} +
+ Release Date: {{.Movie.Date}} +
+ {{end}} + + {{if .StudioName}} +
+ Studio: + {{if .Movie.StudioID}} + {{.StudioName}} + {{else}} + {{.StudioName}} + {{end}} +
+ {{end}} + + {{if .Movie.Director}} +
+ Director: {{.Movie.Director}} +
+ {{end}} + + {{if .Movie.Duration}} +
+ Duration: {{.Movie.Duration}} minutes +
+ {{end}} + + {{if .Movie.Source}} +
+ Source: {{.Movie.Source}} + {{if .Movie.URL}} + | View on {{.Movie.Source}} + {{end}} +
+ {{end}} +
+ + {{if .Movie.Description}} +
+

Description

+

{{.Movie.Description}}

+
+ {{end}} +
+
+ + {{if .Scenes}} +
+

Scenes ({{len .Scenes}})

+
+ {{range .Scenes}} +
+
+
+ +
+
{{.Title}}
+ {{if .Date}} +
📅 {{.Date}}
+ {{end}} +
+
+ {{end}} +
+
+ {{end}} + + {{if .Movie.BackImageURL}} +
+

Back Cover

+
+ {{.Movie.Title}} - Back Cover +
+
+ {{end}} +
+ + diff --git a/internal/web/templates/movies.html b/internal/web/templates/movies.html new file mode 100644 index 0000000..004cab3 --- /dev/null +++ b/internal/web/templates/movies.html @@ -0,0 +1,79 @@ + + + + + + Movies - Goondex + + + + + +
+ + + {{if .Movies}} +
+ {{range .Movies}} +
+
+
+ +
+
{{.Movie.Title}}
+ + {{if .Movie.Date}} +
📅 {{.Movie.Date}}
+ {{end}} + +
{{.SceneCount}} scenes
+ + {{if .StudioName}} +
+ 🎬 {{.StudioName}} +
+ {{end}} + + {{if .Movie.Duration}} +
+ {{.Movie.Duration}} min + {{if .Movie.Source}} + {{.Movie.Source}} + {{end}} +
+ {{end}} +
+
+ {{end}} +
+ {{else}} +
+

No movies found.

+ {{if .Query}} +

Try a different search term or view all movies.

+ {{else}} +

Movies will appear here once imported from TPDB or Adult Empire.

+ {{end}} +
+ {{end}} +
+ + diff --git a/internal/web/templates/performer_detail.html b/internal/web/templates/performer_detail.html new file mode 100644 index 0000000..1acf84a --- /dev/null +++ b/internal/web/templates/performer_detail.html @@ -0,0 +1,272 @@ + + + + + + {{.Performer.Name}} - Goondex + + + + + +
+ + +
+ {{if or .Performer.ImageURL .Performer.PosterURL}} + + {{end}} + +
+
+

{{.Performer.Name}}

+ {{if .Performer.Aliases}} +

aka {{.Performer.Aliases}}

+ {{end}} +
+
ID: {{.Performer.ID}}
+
+ +
+
+

Statistics

+
+ Scenes: + {{.SceneCount}} +
+
+ + {{if or .Performer.Gender .Performer.Birthday .Performer.Birthplace .Performer.Nationality}} +
+

Personal Information

+ {{if .Performer.Gender}} +
+ Gender: + {{.Performer.Gender}} +
+ {{end}} + {{if .Performer.Birthday}} +
+ Birthday: + {{.Performer.Birthday}} +
+ {{end}} + {{if .Performer.DateOfDeath}} +
+ Date of Death: + {{.Performer.DateOfDeath}} +
+ {{end}} + {{if .Performer.Astrology}} +
+ Astrology: + {{.Performer.Astrology}} +
+ {{end}} + {{if .Performer.Birthplace}} +
+ Birthplace: + {{.Performer.Birthplace}} +
+ {{end}} + {{if .Performer.Nationality}} +
+ Nationality: + {{.Performer.Nationality}} +
+ {{end}} + {{if .Performer.Ethnicity}} +
+ Ethnicity: + {{.Performer.Ethnicity}} +
+ {{end}} +
+ {{end}} + + {{if or .Performer.Height .Performer.Weight .Performer.Measurements .Performer.HairColor .Performer.EyeColor}} +
+

Physical Attributes

+ {{if .Performer.Height}} +
+ Height: + {{.Performer.Height}} cm +
+ {{end}} + {{if .Performer.Weight}} +
+ Weight: + {{.Performer.Weight}} kg +
+ {{end}} + {{if .Performer.Measurements}} +
+ Measurements: + {{.Performer.Measurements}} +
+ {{end}} + {{if .Performer.CupSize}} +
+ Cup Size: + {{.Performer.CupSize}} +
+ {{end}} + {{if .Performer.HairColor}} +
+ Hair Color: + {{.Performer.HairColor}} +
+ {{end}} + {{if .Performer.EyeColor}} +
+ Eye Color: + {{.Performer.EyeColor}} +
+ {{end}} + {{if .Performer.TattooDescription}} +
+ Tattoos: + {{.Performer.TattooDescription}} +
+ {{end}} + {{if .Performer.PiercingDescription}} +
+ Piercings: + {{.Performer.PiercingDescription}} +
+ {{end}} +
+ {{end}} + + {{if .Performer.Career}} +
+

Career

+
+ Years: + {{.Performer.Career}} +
+
+ {{end}} + + {{if or .Performer.Source .Performer.ImageURL}} +
+

Metadata

+ {{if .Performer.Source}} +
+ Source: + {{.Performer.Source}} +
+
+ Source ID: + {{.Performer.SourceID}} +
+ {{end}} + {{if .Performer.ImageURL}} +
+ Image: + View +
+ {{end}} +
+ {{end}} +
+ + {{if .Performer.Bio}} +
+

Biography

+

{{.Performer.Bio}}

+
+ {{end}} +
+ + + {{if .Scenes}} + + {{else}} +
+

No scenes found for this performer.

+

Try importing scenes from ThePornDB or Adult Empire.

+
+ {{end}} +
+ + + + + + + + diff --git a/internal/web/templates/performers.html b/internal/web/templates/performers.html new file mode 100644 index 0000000..bad7000 --- /dev/null +++ b/internal/web/templates/performers.html @@ -0,0 +1,91 @@ + + + + + + Performers - Goondex + + + + + +
+ + + {{if .Performers}} +
+ {{range .Performers}} +
+
+
+ +
+
+ {{.Performer.Name}}{{if gt .Age 0}} ({{.Age}}){{end}} +
+
{{.SceneCount}} scenes
+ + {{if .Performer.Nationality}} +
+ {{if .CountryFlag}}{{.CountryFlag}}{{else}}🌍{{end}} {{.Performer.Nationality}} +
+ {{end}} + + {{if .Performer.Gender}} +
+ {{.Performer.Gender}} + {{if .Performer.Source}} + {{.Performer.Source}} + {{end}} +
+ {{end}} +
+
+ {{end}} +
+ {{else}} +
+

No performers found.

+ {{if .Query}} +

Try a different search term or view all performers.

+ {{else}} +

Import performers using the dashboard or CLI: ./goondex import performer "name"

+ {{end}} +
+ {{end}} +
+ + diff --git a/internal/web/templates/scene_detail.html b/internal/web/templates/scene_detail.html new file mode 100644 index 0000000..6990e00 --- /dev/null +++ b/internal/web/templates/scene_detail.html @@ -0,0 +1,141 @@ + + + + + + {{.Scene.Title}} - Goondex + + + + + +
+ + +
+ {{if .Scene.ImageURL}} +
+ {{.Scene.Title}} +
+ {{end}} + +
+
+

{{.Scene.Title}}

+ {{if .Scene.Code}} +

Code: {{.Scene.Code}}

+ {{end}} +
+
ID: {{.Scene.ID}}
+
+ +
+
+

Information

+ {{if .Scene.Date}} +
+ Date: + {{.Scene.Date}} +
+ {{end}} + {{if .StudioName}} +
+ Studio: + + {{if .Scene.StudioID}} + {{.StudioName}} + {{else}} + {{.StudioName}} + {{end}} + +
+ {{end}} + {{if .Scene.Director}} +
+ Director: + {{.Scene.Director}} +
+ {{end}} +
+ + {{if .Performers}} +
+

Performers ({{len .Performers}})

+
    + {{range .Performers}} +
  • {{.Name}}
  • + {{end}} +
+
+ {{end}} + + {{if .Tags}} +
+

Tags ({{len .Tags}})

+
+ {{range .Tags}} + {{.Name}} + {{end}} +
+
+ {{end}} + + {{if .Movies}} +
+

Movies ({{len .Movies}})

+ +
+ {{end}} + + {{if or .Scene.Source .Scene.URL}} +
+

Metadata

+ {{if .Scene.Source}} +
+ Source: + {{.Scene.Source}} +
+
+ Source ID: + {{.Scene.SourceID}} +
+ {{end}} + {{if .Scene.URL}} +
+ URL: + View +
+ {{end}} +
+ {{end}} +
+ + {{if .Scene.Description}} +
+

Description

+

{{.Scene.Description}}

+
+ {{end}} +
+
+ + + + diff --git a/internal/web/templates/scenes.html b/internal/web/templates/scenes.html new file mode 100644 index 0000000..7a2089f --- /dev/null +++ b/internal/web/templates/scenes.html @@ -0,0 +1,83 @@ + + + + + + Scenes - Goondex + + + + + + +
+ + + {{if .Scenes}} +
+ {{range .Scenes}} +
+
+
+ +
+
{{.Scene.Title}}
+ + {{if .Scene.Date}} +
📅 {{.Scene.Date}}
+ {{end}} + + {{if .StudioName}} +
+ 🏢 {{.StudioName}} +
+ {{end}} + +
+ {{if .Scene.Code}} + {{.Scene.Code}} + {{end}} + {{if .Scene.Source}} + {{.Scene.Source}} + {{end}} +
+
+
+ {{end}} +
+ {{else}} +
+

No scenes found.

+ {{if .Query}} +

Try a different search term or view all scenes.

+ {{else}} +

Import scenes using the dashboard or CLI: ./goondex import scene "title"

+ {{end}} +
+ {{end}} +
+ + diff --git a/internal/web/templates/studio_detail.html b/internal/web/templates/studio_detail.html new file mode 100644 index 0000000..4d93107 --- /dev/null +++ b/internal/web/templates/studio_detail.html @@ -0,0 +1,77 @@ + + + + + + {{.Studio.Name}} - Goondex + + + + + +
+ + +
+
+
+

{{.Studio.Name}}

+
+
ID: {{.Studio.ID}}
+
+ + {{if .Studio.ImageURL}} + + {{end}} + +
+
+

Statistics

+
+ Scenes: + {{.SceneCount}} +
+
+ + {{if or .Studio.Source .Studio.Description}} +
+

Metadata

+ {{if .Studio.Source}} +
+ Source: + {{.Studio.Source}} +
+
+ Source ID: + {{.Studio.SourceID}} +
+ {{end}} +
+ {{end}} +
+ + {{if .Studio.Description}} +
+

Description

+

{{.Studio.Description}}

+
+ {{end}} +
+
+ + diff --git a/internal/web/templates/studios.html b/internal/web/templates/studios.html new file mode 100644 index 0000000..dcbb282 --- /dev/null +++ b/internal/web/templates/studios.html @@ -0,0 +1,71 @@ + + + + + + Studios - Goondex + + + + + +
+ + + {{if .Studios}} +
+ {{range .Studios}} +
+
+
+ +
+
{{.Studio.Name}}
+
{{.SceneCount}} scenes
+ + {{if .Studio.Description}} +
+ {{.Studio.Description}} +
+ {{end}} + + {{if .Studio.Source}} +
+ {{.Studio.Source}} +
+ {{end}} +
+
+ {{end}} +
+ {{else}} +
+

No studios found.

+ {{if .Query}} +

Try a different search term or view all studios.

+ {{else}} +

Import studios using the dashboard or CLI: ./goondex import studio "name"

+ {{end}} +
+ {{end}} +
+ + diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..2a23145 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,42 @@ +{ + "name": "Goondex", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "bootstrap": "^5.3.8" + } + }, + "node_modules/@popperjs/core": { + "version": "2.11.8", + "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", + "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", + "license": "MIT", + "peer": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/popperjs" + } + }, + "node_modules/bootstrap": { + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-5.3.8.tgz", + "integrity": "sha512-HP1SZDqaLDPwsNiqRqi5NcP0SSXciX2s9E+RyqJIIqGo+vJeN5AJVM98CXmW/Wux0nQ5L7jeWUdplCEf0Ee+tg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/twbs" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/bootstrap" + } + ], + "license": "MIT", + "peerDependencies": { + "@popperjs/core": "^2.11.8" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..719e5df --- /dev/null +++ b/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "bootstrap": "^5.3.8" + } +}