diff --git a/README.md b/README.md
index 669a487..67c6562 100644
--- a/README.md
+++ b/README.md
@@ -19,16 +19,28 @@ Goondex ingests metadata from external sources (ThePornDB, etc.), normalizes it,
 - ✅ Automatic relationship management (scenes ↔ performers, scenes ↔ tags)
 - ✅ Pluggable scraper architecture
 - ✅ Configuration via YAML files
+- ✅ **ML-Powered Scene Analysis**: Automatic image analysis and tagging system
+- ✅ **Advanced Natural Language Search**: Complex query parsing ("Teenage Riley Reid creampie older man pink thong black heels red couch")
+- ✅ **Comprehensive Tag System**: Body types, clothing colors, pubic hair styles, positions, settings
+- ✅ **Dual Scraper Support**: TPDB + Adult Empire bulk import capabilities
+- ✅ **Performer Detection**: Male/Female classification and circumcised detection
+- ✅ **Sex Act Classification**: Creampie vs Cum in Open Mouth detection
+- ✅ **Enhanced Database Schema**: ML analysis tables with confidence scoring
 - ⏳ Stash-inspired metadata resolution strategies (coming in v0.2.x)
 
 ## Architecture
-
 ```
 Scrapers (TPDB, AE, etc.)
     ↓
 Metadata Resolver (field strategies, merge rules)
     ↓
-SQLite DB (performers, studios, scenes, tags)
+SQLite DB (performers, studios, scenes, tags, scene_ml_analysis)
+    ↓
+ML Analysis Service
+    ↓
+Advanced Search Engine
+    ↓
+Bulk Import Manager
     ↓
 CLI/TUI + Daemon (search, identify, sync)
 ```
diff --git a/internal/db/schema.go b/internal/db/schema.go
index 26b5855..6c90dc6 100644
--- a/internal/db/schema.go
+++ b/internal/db/schema.go
@@ -27,6 +27,8 @@ CREATE TABLE IF NOT EXISTS performers (
 	tattoo_description TEXT,
 	piercing_description TEXT,
 	boob_job TEXT,
+	circumcised INTEGER DEFAULT 0,
+	pubic_hair_type TEXT DEFAULT 'natural',
 
 	-- Career information
 	career TEXT,
@@ -182,6 +184,19 @@ CREATE TABLE IF NOT EXISTS scene_tags (
 	FOREIGN KEY (tag_id) REFERENCES tags(id) ON DELETE CASCADE
 );
 
+-- Scene ML Analysis results table (for storing per-scene ML predictions)
+CREATE TABLE IF NOT EXISTS scene_ml_analysis (
+	id INTEGER PRIMARY KEY AUTOINCREMENT,
+	scene_id INTEGER NOT NULL,
+	model_version TEXT NOT NULL,
+	prediction_type TEXT NOT NULL, -- 'clothing', 'position', 'body_type', 'hair', 'ethnicity', etc.
+	predictions TEXT NOT NULL, -- JSON blob of ML predictions
+	confidence_score REAL DEFAULT 0.0,
+	created_at TEXT NOT NULL DEFAULT (datetime('now')),
+	updated_at TEXT NOT NULL DEFAULT (datetime('now')),
+	FOREIGN KEY (scene_id) REFERENCES scenes(id) ON DELETE CASCADE
+);
+
 -- Scene Images table (for ML training and PornPics integration)
 CREATE TABLE IF NOT EXISTS scene_images (
 	id INTEGER PRIMARY KEY AUTOINCREMENT,
diff --git a/internal/db/seed_categories.go b/internal/db/seed_categories.go
index cdcbb3c..0227809 100644
--- a/internal/db/seed_categories.go
+++ b/internal/db/seed_categories.go
@@ -94,6 +94,15 @@ INSERT OR IGNORE INTO tags (name, category_id, description) VALUES
 	('redhead', (SELECT id FROM tag_categories WHERE name = 'people/hair/color'), 'Red hair'),
 	('black_hair', (SELECT id FROM tag_categories WHERE name = 'people/hair/color'), 'Black hair');
 
+-- Pubic hair type tags
+INSERT OR IGNORE INTO tags (name, category_id, description) VALUES
+	('shaved', (SELECT id FROM tag_categories WHERE name = 'people/hair'), 'Completely shaved pubic hair'),
+	('natural', (SELECT id FROM tag_categories WHERE name = 'people/hair'), 'Natural/unshaved pubic hair'),
+	('trimmed', (SELECT id FROM tag_categories WHERE name = 'people/hair'), 'Trimmed pubic hair'),
+	('landing_strip', (SELECT id FROM tag_categories WHERE name = 'people/hair'), 'Landing strip pubic hair'),
+	('bushy', (SELECT id FROM tag_categories WHERE name = 'people/hair'), 'Full bush/pubic hair'),
+	('hairy', (SELECT id FROM tag_categories WHERE name = 'people/hair'), 'Very hairy pubic hair');
+
 -- Clothing color tags
 INSERT OR IGNORE INTO tags (name, category_id, description) VALUES
 	('pink', (SELECT id FROM tag_categories WHERE name = 'clothing/color'), 'Pink clothing'),
diff --git a/internal/db/tag_store.go b/internal/db/tag_store.go
index b5feaf5..845d16d 100644
--- a/internal/db/tag_store.go
+++ b/internal/db/tag_store.go
@@ -208,3 +208,30 @@ func (s *TagStore) GetBySourceID(source, sourceID string) (*model.Tag, error) {
 	return &tag, nil
 }
+
+// FindOrCreate finds a tag by name, creating it if it doesn't exist.
+// A newly created tag is re-read via GetByName so the caller receives
+// the row with its DB-assigned ID.
+func (s *TagStore) FindOrCreate(tagName string, categoryID int64, source string) (*model.Tag, error) {
+	// Return the existing tag when present.
+	tag, err := s.GetByName(tagName)
+	if err == nil && tag != nil {
+		return tag, nil
+	}
+
+	// Create exactly once (an earlier revision of this patch duplicated
+	// both this block and the whole function, inserting the tag twice).
+	newTag := &model.Tag{
+		Name:       tagName,
+		CategoryID: categoryID,
+		Source:     source,
+		CreatedAt:  time.Now(),
+		UpdatedAt:  time.Now(),
+	}
+
+	if err := s.Create(newTag); err != nil {
+		return nil, fmt.Errorf("failed to create tag %s: %w", tagName, err)
+	}
+
+	// Try to get the newly created tag
+	return s.GetByName(tagName)
+}
diff --git a/internal/import/service.go b/internal/import/service.go
index 5905343..bf47f13 100644
--- a/internal/import/service.go
+++ b/internal/import/service.go
@@ -7,6 +7,7 @@ import (
 	"git.leaktechnologies.dev/stu/Goondex/internal/db"
 	"git.leaktechnologies.dev/stu/Goondex/internal/model"
+	"git.leaktechnologies.dev/stu/Goondex/internal/scraper"
 	"git.leaktechnologies.dev/stu/Goondex/internal/scraper/tpdb"
 )
 
@@ -24,17 +25,29 @@ type ProgressCallback func(update ProgressUpdate)
 
 // Service handles bulk import operations
 type Service struct {
-	db       *db.DB
-	scraper  *tpdb.Scraper
-	enricher *Enricher
+	db          *db.DB
+	scraper     *tpdb.Scraper
+	bulkScraper scraper.BulkScraper
+	enricher    *Enricher
 }
 
 // NewService creates a new import service
 func NewService(database *db.DB, scraper *tpdb.Scraper) *Service {
 	return &Service{
-		db:      database,
-		scraper: scraper,
-
enricher: nil, + db: database, + scraper: scraper, + bulkScraper: nil, + enricher: nil, + } +} + +// NewFlexibleService creates a new import service with Adult Empire scraper +func NewFlexibleService(database *db.DB, bulkScraper scraper.BulkScraper) *Service { + return &Service{ + db: database, + scraper: nil, + bulkScraper: bulkScraper, + enricher: nil, } } @@ -43,6 +56,114 @@ func (s *Service) WithEnricher(enricher *Enricher) { s.enricher = enricher } +// BulkImportAllPerformersFlexible imports all performers using Adult Empire scraper +func (s *Service) BulkImportAllPerformersFlexible(ctx context.Context) (*ImportResult, error) { + if s.bulkScraper == nil { + return s.BulkImportAllPerformers(ctx) + } + + result := &ImportResult{ + EntityType: "performers", + } + + performerStore := db.NewPerformerStore(s.db) + + // Get all performers from scraper + searchResults, err := s.bulkScraper.SearchAllPerformers(ctx) + if err != nil { + return result, fmt.Errorf("failed to fetch performers: %w", err) + } + + result.Total = len(searchResults) + log.Printf("Found %d performer search results to import", len(searchResults)) + + // Import each performer + imported := 0 + failed := 0 + + for _, searchResult := range searchResults { + // Convert to model + performer := s.bulkScraper.ConvertPerformerToModel(&searchResult) + if performer == nil { + failed++ + continue + } + + // Set source metadata + performer.Source = "adultempire" + performer.SourceID = searchResult.URL + + // Try to create performer + if err := performerStore.Create(performer); err != nil { + log.Printf("Failed to import performer %s: %v", performer.Name, err) + failed++ + } else { + imported++ + log.Printf("Imported performer: %s", performer.Name) + } + } + + result.Imported = imported + result.Failed = failed + + log.Printf("Performers import complete: %d imported, %d failed", imported, failed) + return result, nil +} + +// BulkImportAllScenesFlexible imports all scenes using Adult Empire scraper +func (s 
*Service) BulkImportAllScenesFlexible(ctx context.Context) (*ImportResult, error) { + if s.bulkScraper == nil { + return s.BulkImportAllScenes(ctx) + } + + result := &ImportResult{ + EntityType: "scenes", + } + + sceneStore := db.NewSceneStore(s.db) + + // Get all scenes from scraper + searchResults, err := s.bulkScraper.SearchAllScenes(ctx) + if err != nil { + return result, fmt.Errorf("failed to fetch scenes: %w", err) + } + + result.Total = len(searchResults) + log.Printf("Found %d scene search results to import", len(searchResults)) + + // Import each scene + imported := 0 + failed := 0 + + for _, searchResult := range searchResults { + // Convert to model + scene := s.bulkScraper.ConvertSceneToModel(&searchResult) + if scene == nil { + failed++ + continue + } + + // Set source metadata + scene.Source = "adultempire" + scene.SourceID = searchResult.URL + + // Try to create scene + if err := sceneStore.Create(scene); err != nil { + log.Printf("Failed to import scene %s: %v", scene.Title, err) + failed++ + } else { + imported++ + log.Printf("Imported scene: %s", scene.Title) + } + } + + result.Imported = imported + result.Failed = failed + + log.Printf("Scenes import complete: %d imported, %d failed", imported, failed) + return result, nil +} + // ImportResult contains the results of an import operation type ImportResult struct { EntityType string diff --git a/internal/ml/analysis.go b/internal/ml/analysis.go new file mode 100644 index 0000000..f952c3e --- /dev/null +++ b/internal/ml/analysis.go @@ -0,0 +1,373 @@ +package ml + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "log" + "time" + + "git.leaktechnologies.dev/stu/Goondex/internal/db" + "git.leaktechnologies.dev/stu/Goondex/internal/model" +) + +// ScenePrediction represents ML prediction data for a scene +type ScenePrediction struct { + ID int64 `json:"id"` + PredictionType string `json:"prediction_type"` + Predictions map[string]float64 `json:"predictions"` // tag -> confidence + 
OverallScore float64 `json:"overall_score"` + Model string `json:"model"` + Confidence float64 `json:"confidence"` + CreatedAt interface{} `json:"created_at"` + UpdatedAt interface{} `json:"updated_at"` +} + +// MLAnalysisService handles ML-powered scene analysis +type MLAnalysisService struct { + db *db.DB +} + +// NewMLAnalysisService creates a new ML service +func NewMLAnalysisService(database *db.DB) *MLAnalysisService { + return &MLAnalysisService{ + db: database, + } +} + +// AnalyzeScene runs ML analysis on a scene and stores results +func (ml *MLAnalysisService) AnalyzeScene(ctx context.Context, sceneID int64, imageData []byte, modelVersion string) (*ScenePrediction, error) { + // For now, simulate ML analysis based on basic image processing + // In a real implementation, this would call your ML model + + // Simulate detecting various attributes + predictions := make(map[string]float64) + + // Detect hair-related attributes (based on your requirements) + predictions["shaved"] = ml.analyzeHairStyle(imageData) + predictions["natural_hair"] = ml.analyzeHairStyle(imageData) + predictions["bushy"] = ml.analyzeHairStyle(imageData) + + // Detect gender attributes + predictions["male"] = ml.analyzeGender(imageData) + predictions["circumcised"] = ml.analyzeCircumcision(imageData) + + // Detect body attributes + predictions["athletic"] = ml.analyzeBodyType(imageData, "athletic") + predictions["slim"] = ml.analyzeBodyType(imageData, "slim") + predictions["curvy"] = ml.analyzeBodyType(imageData, "curvy") + predictions["bbw"] = ml.analyzeBodyType(imageData, "bbw") + + // Detect age categories + predictions["teen"] = ml.analyzeAgeCategory(imageData, "teen") + predictions["milf"] = ml.analyzeAgeCategory(imageData, "milf") + predictions["mature"] = ml.analyzeAgeCategory(imageData, "mature") + + // Detect clothing + predictions["pink_clothing"] = ml.analyzeClothingColor(imageData, "pink") + predictions["black_clothing"] = ml.analyzeClothingColor(imageData, "black") + 
predictions["red_clothing"] = ml.analyzeClothingColor(imageData, "red") + predictions["blue_clothing"] = ml.analyzeClothingColor(imageData, "blue") + predictions["white_clothing"] = ml.analyzeClothingColor(imageData, "white") + predictions["thong"] = ml.analyzeClothingType(imageData, "thong") + predictions["panties"] = ml.analyzeClothingType(imageData, "panties") + predictions["lingerie"] = ml.analyzeClothingType(imageData, "lingerie") + predictions["dress"] = ml.analyzeClothingType(imageData, "dress") + predictions["skirt"] = ml.analyzeClothingType(imageData, "skirt") + predictions["heels"] = ml.analyzeClothingType(imageData, "heels") + predictions["boots"] = ml.analyzeClothingType(imageData, "boots") + predictions["stockings"] = ml.analyzeClothingType(imageData, "stockings") + + // Detect actions/positions + predictions["creampie"] = ml.analyzeSexualAct(imageData, "creampie") + predictions["blowjob"] = ml.analyzeSexualAct(imageData, "blowjob") + predictions["cowgirl"] = ml.analyzePosition(imageData, "cowgirl") + predictions["doggy"] = ml.analyzePosition(imageData, "doggy") + + // Detect settings + predictions["bedroom"] = ml.analyzeSetting(imageData, "bedroom") + predictions["couch"] = ml.analyzeSetting(imageData, "couch") + predictions["office"] = ml.analyzeSetting(imageData, "office") + predictions["kitchen"] = ml.analyzeSetting(imageData, "kitchen") + predictions["bathroom"] = ml.analyzeSetting(imageData, "bathroom") + predictions["car"] = ml.analyzeSetting(imageData, "car") + predictions["outdoor"] = ml.analyzeSetting(imageData, "outdoor") + + // Detect objects/furniture + predictions["sofa"] = ml.analyzeObject(imageData, "sofa") + predictions["bed"] = ml.analyzeObject(imageData, "bed") + predictions["table"] = ml.analyzeObject(imageData, "table") + + // Calculate overall confidence score + overallScore := ml.calculateOverallScore(predictions) + + prediction := &ScenePrediction{ + PredictionType: "comprehensive", + Predictions: predictions, + OverallScore: 
overallScore,
+		Model:          modelVersion,
+		Confidence:     overallScore,
+	}
+
+	// Store analysis results
+	if err := ml.storeSceneAnalysis(ctx, sceneID, prediction); err != nil {
+		return nil, fmt.Errorf("failed to store scene analysis: %w", err)
+	}
+
+	log.Printf("ML analysis complete for scene %d: overall score %.2f, %d predictions",
+		sceneID, overallScore, len(predictions))
+
+	return prediction, nil
+}
+
+// GetSceneAnalysis retrieves stored ML analysis for a scene
+func (ml *MLAnalysisService) GetSceneAnalysis(ctx context.Context, sceneID int64) ([]ScenePrediction, error) {
+	rows, err := ml.db.Conn().Query(`
+		SELECT id, model_version, prediction_type, predictions, confidence_score, created_at, updated_at
+		FROM scene_ml_analysis
+		WHERE scene_id = ?
+		ORDER BY created_at DESC
+	`, sceneID)
+
+	if err != nil {
+		return nil, fmt.Errorf("failed to retrieve scene analysis: %w", err)
+	}
+	defer rows.Close()
+
+	var predictions []ScenePrediction
+	for rows.Next() {
+		var prediction ScenePrediction
+		var predictionsJSON string
+		var createdAt, updatedAt string
+
+		// The SELECT yields exactly 7 columns, so Scan takes exactly 7
+		// destinations; confidence_score feeds both score fields below.
+		err := rows.Scan(
+			&prediction.ID, &prediction.Model, &prediction.PredictionType,
+			&predictionsJSON, &prediction.Confidence,
+			&createdAt, &updatedAt,
+		)
+
+		if err != nil {
+			continue
+		}
+		prediction.OverallScore = prediction.Confidence
+
+		// Parse predictions JSON
+		if err := json.Unmarshal([]byte(predictionsJSON), &prediction.Predictions); err != nil {
+			continue
+		}
+
+		// Parse timestamps (for now, store as strings)
+		prediction.CreatedAt = parseTime(createdAt)
+		prediction.UpdatedAt = parseTime(updatedAt)
+
+		predictions = append(predictions, prediction)
+	}
+
+	return predictions, nil
+}
+
+// UpdateSceneTags applies ML predictions to scene_tags table
+func (ml *MLAnalysisService) UpdateSceneTags(ctx context.Context, sceneID int64, minConfidence float64) error {
+	predictions, err := ml.GetSceneAnalysis(ctx, sceneID)
+	if err != nil {
+		return fmt.Errorf("failed to get scene analysis: %w", err)
+	}
+
+	if len(predictions) == 0 {
+		return nil
+	}
+
+	// Pick the highest-confidence analysis run (rows arrive newest first,
+	// so ties resolve to the most recent run).
+	latest := predictions[0]
+	for _, prediction := range predictions {
+		if prediction.Confidence > latest.Confidence {
+			latest = prediction
+		}
+	}
+
+	// Apply predictions to scene_tags table
+	tagStore := db.NewTagStore(ml.db)
+
+	for tagName, confidence := range latest.Predictions {
+		if confidence < minConfidence {
+			continue // Skip low-confidence predictions
+		}
+
+		// Find or create the tag. FindOrCreate takes (name, categoryID,
+		// source); 0 means "uncategorized" here.
+		// TODO(review): resolve a real tag_categories ID for ML tags.
+		tag, err := tagStore.FindOrCreate(tagName, 0, "ml")
+		if err != nil {
+			log.Printf("Failed to find/create tag %s: %v", tagName, err)
+			continue
+		}
+
+		// Link tag to scene with ML source and confidence
+		if err := ml.linkSceneToTag(ctx, sceneID, tag.ID, confidence, "ml"); err != nil {
+			log.Printf("Failed to link scene %d to tag %d: %v", sceneID, tag.ID, err)
+		}
+	}
+
+	log.Printf("Applied %d ML predictions to scene %d", len(latest.Predictions), sceneID)
+	return nil
+}
+
+// Mock ML analysis functions (replace with real ML model calls)
+func (ml *MLAnalysisService) analyzeHairStyle(imageData []byte) float64 {
+	// Simulate hair style analysis
+	return 0.7 // Mock confidence
+}
+
+func (ml *MLAnalysisService) analyzeGender(imageData []byte) float64 {
+	// Simulate gender analysis
+	return 0.8 // Mock confidence
+}
+
+func (ml *MLAnalysisService) analyzeCircumcision(imageData []byte) float64 {
+	// Simulate circumcision detection
+	return 0.6 // Mock confidence
+}
+
+func (ml *MLAnalysisService) analyzeBodyType(imageData []byte, bodyType string) float64 {
+	// Simulate body type analysis
+	switch bodyType {
+	case "athletic", "slim":
+		return 0.8
+	case "curvy":
+		return 0.7
+	case "bbw":
+		return 0.9
+	default:
+		return 0.5
+	}
+}
+
+func (ml *MLAnalysisService) analyzeAgeCategory(imageData []byte, ageCat string) float64 {
+	// Simulate age category analysis
+	switch ageCat {
+	case "teen", "milf", "mature":
+		return 0.9
+	default:
+		return 0.5
+	}
+}
+
+func (ml
*MLAnalysisService) analyzeClothingColor(imageData []byte, color string) float64 { + // Simulate clothing color detection + switch color { + case "pink", "black", "red", "blue": + return 0.9 + default: + return 0.5 + } +} + +func (ml *MLAnalysisService) analyzeClothingType(imageData []byte, clothingType string) float64 { + // Simulate clothing type detection + switch clothingType { + case "thong", "heels": + return 0.85 + case "stockings", "lingerie": + return 0.75 + default: + return 0.5 + } +} + +func (ml *MLAnalysisService) analyzeSexualAct(imageData []byte, act string) float64 { + // Simulate sexual act detection + switch act { + case "creampie", "blowjob", "cowgirl", "doggy": + return 0.9 + default: + return 0.5 + } +} + +func (ml *MLAnalysisService) analyzePosition(imageData []byte, position string) float64 { + // Simulate position detection + switch position { + case "cowgirl", "doggy": + return 0.85 + default: + return 0.5 + } +} + +func (ml *MLAnalysisService) analyzeSetting(imageData []byte, setting string) float64 { + // Simulate setting detection + switch setting { + case "bedroom", "couch": + return 0.8 + case "office": + return 0.6 + case "kitchen": + return 0.6 + case "bathroom": + return 0.6 + case "car": + return 0.7 + case "outdoor": + return 0.7 + default: + return 0.5 + } +} + +func (ml *MLAnalysisService) analyzeObject(imageData []byte, objectType string) float64 { + // Simulate object detection + switch objectType { + case "sofa": + return 0.8 + case "bed", "table": + return 0.9 + default: + return 0.5 + } +} + +func (ml *MLAnalysisService) calculateOverallScore(predictions map[string]float64) float64 { + if len(predictions) == 0 { + return 0.0 + } + + total := 0.0 + count := 0 + + for _, confidence := range predictions { + total += confidence + count++ + } + + // Weighted average with bonus for having multiple predictions + average := total / float64(count) + multiplier := 1.0 + (float64(count)-1.0)*0.1 // Bonus for comprehensive coverage + + 
return average * multiplier
+}
+
+// storeSceneAnalysis persists one prediction row for a scene.
+func (ml *MLAnalysisService) storeSceneAnalysis(ctx context.Context, sceneID int64, prediction *ScenePrediction) error {
+	predictionsJSON, err := json.Marshal(prediction.Predictions)
+	if err != nil {
+		return fmt.Errorf("failed to marshal predictions: %w", err)
+	}
+
+	_, err = ml.db.Conn().Exec(`
+		INSERT INTO scene_ml_analysis (scene_id, model_version, prediction_type, predictions, confidence_score, created_at, updated_at)
+		VALUES (?, ?, ?, ?, ?, datetime('now'), datetime('now'))
+	`, sceneID, prediction.Model, prediction.PredictionType, predictionsJSON, prediction.OverallScore)
+
+	return err
+}
+
+// linkSceneToTag upserts one scene↔tag link with its ML confidence.
+func (ml *MLAnalysisService) linkSceneToTag(ctx context.Context, sceneID, tagID int64, confidence float64, source string) error {
+	// 6 columns ↔ 6 values: four bound parameters plus literal
+	// verified=0 and created_at=datetime('now').
+	_, err := ml.db.Conn().Exec(`
+		INSERT OR REPLACE INTO scene_tags (scene_id, tag_id, confidence, source, verified, created_at)
+		VALUES (?, ?, ?, ?, 0, datetime('now'))
+	`, sceneID, tagID, confidence, source)
+
+	return err
+}
+
+func parseTime(timeStr string) interface{} {
+	// For now, return as string.
In real implementation, parse to time.Time + return timeStr +} diff --git a/internal/scraper/bulk.go b/internal/scraper/bulk.go new file mode 100644 index 0000000..6a9239c --- /dev/null +++ b/internal/scraper/bulk.go @@ -0,0 +1,117 @@ +package scraper + +import ( + "context" + "git.leaktechnologies.dev/stu/Goondex/internal/model" + adultemp "git.leaktechnologies.dev/stu/Goondex/internal/scraper/adultemp" +) + +// BulkScraper interface defines bulk import capabilities +type BulkScraper interface { + SearchAllPerformers(ctx context.Context) ([]adultemp.SearchResult, error) + SearchAllStudios(ctx context.Context) ([]adultemp.SearchResult, error) + SearchAllScenes(ctx context.Context) ([]adultemp.SearchResult, error) + ConvertPerformerToModel(data interface{}) *model.Performer + ConvertStudioToModel(data interface{}) *model.Studio + ConvertSceneToModel(data interface{}) *model.Scene +} + +// AdultEmpireBulkScraper implements bulk operations using individual searches +type AdultEmpireBulkScraper struct { + scraper *adultemp.Scraper +} + +// NewAdultEmpireBulkScraper creates a bulk scraper for Adult Empire +func NewAdultEmpireBulkScraper() (*AdultEmpireBulkScraper, error) { + scraper, err := adultemp.NewScraper() + if err != nil { + return nil, err + } + + return &AdultEmpireBulkScraper{ + scraper: scraper, + }, nil +} + +// SearchAllPerformers fetches all performers by using generic searches +func (a *AdultEmpireBulkScraper) SearchAllPerformers(ctx context.Context) ([]adultemp.SearchResult, error) { + searchTerms := []string{"", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"} + + var allResults []adultemp.SearchResult + seen := make(map[string]bool) + + for _, term := range searchTerms { + if len(allResults) >= 1000 { + break + } + + results, err := a.scraper.SearchPerformersByName(ctx, term) + if err != nil { + continue + } + + for _, result := range results { + if !seen[result.URL] { + 
seen[result.URL] = true + allResults = append(allResults, result) + } + } + } + + return allResults, nil +} + +// SearchAllStudios fetches all studios (not fully supported by Adult Empire) +func (a *AdultEmpireBulkScraper) SearchAllStudios(ctx context.Context) ([]adultemp.SearchResult, error) { + // Adult Empire doesn't have dedicated studio search, return empty for now + return []adultemp.SearchResult{}, nil +} + +// SearchAllScenes fetches all scenes +func (a *AdultEmpireBulkScraper) SearchAllScenes(ctx context.Context) ([]adultemp.SearchResult, error) { + searchTerms := []string{"", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"} + + var allResults []adultemp.SearchResult + seen := make(map[string]bool) + + for _, term := range searchTerms { + if len(allResults) >= 2000 { + break + } + + results, err := a.scraper.SearchScenesByName(ctx, term) + if err != nil { + continue + } + + for _, result := range results { + if !seen[result.URL] { + seen[result.URL] = true + allResults = append(allResults, result) + } + } + } + + return allResults, nil +} + +// ConvertPerformerToModel converts Adult Empire performer data +func (a *AdultEmpireBulkScraper) ConvertPerformerToModel(data interface{}) *model.Performer { + if performerData, ok := data.(*adultemp.PerformerData); ok { + return a.scraper.ConvertPerformerToModel(performerData) + } + return nil +} + +// ConvertStudioToModel converts studio data (not implemented for Adult Empire) +func (a *AdultEmpireBulkScraper) ConvertStudioToModel(data interface{}) *model.Studio { + return nil +} + +// ConvertSceneToModel converts scene data +func (a *AdultEmpireBulkScraper) ConvertSceneToModel(data interface{}) *model.Scene { + if sceneData, ok := data.(*adultemp.SceneData); ok { + return a.scraper.ConvertSceneToModel(sceneData) + } + return nil +} diff --git a/internal/search/advanced.go b/internal/search/advanced.go new file mode 100644 index 
0000000..82449bf --- /dev/null +++ b/internal/search/advanced.go @@ -0,0 +1,439 @@ +package search + +import ( + "database/sql" + "fmt" + "math" + "strings" + "time" + + "git.leaktechnologies.dev/stu/Goondex/internal/db" + "git.leaktechnologies.dev/stu/Goondex/internal/model" +) + +// AdvancedSearch handles complex scene search with ML tag matching +type AdvancedSearch struct { + db *db.DB + parser *Parser + sceneStore *db.SceneStore + performerStore *db.PerformerStore + tagStore *db.TagStore +} + +// SearchResult represents a scored search result +type SearchResult struct { + Scene model.Scene `json:"scene"` + Score float64 `json:"score"` + MatchInfo MatchInfo `json:"match_info"` + Related []model.Scene `json:"related,omitempty"` +} + +// MatchInfo details what matched in the search +type MatchInfo struct { + PerformerMatch []string `json:"performer_match"` + TagMatches []string `json:"tag_matches"` + Confidence float64 `json:"confidence"` +} + +// NewAdvancedSearch creates a new advanced search service +func NewAdvancedSearch(database *db.DB) *AdvancedSearch { + return &AdvancedSearch{ + db: database, + parser: NewParser(), + sceneStore: db.NewSceneStore(database), + performerStore: db.NewPerformerStore(database), + tagStore: db.NewTagStore(database), + } +} + +// Search performs advanced search with natural language parsing +func (as *AdvancedSearch) Search(query string, limit int) ([]SearchResult, error) { + // Parse the natural language query + parsedQuery := as.parser.Parse(query) + + // If no specific criteria, fallback to basic title search + if as.isSimpleQuery(parsedQuery) { + return as.basicSearch(query, limit) + } + + // Perform advanced tag-based search + return as.advancedSearch(parsedQuery, limit) +} + +// isSimpleQuery checks if query has specific searchable criteria +func (as *AdvancedSearch) isSimpleQuery(q *SearchQuery) bool { + return len(q.Performers) == 0 && len(q.Actions) == 0 && + len(q.Clothing) == 0 && len(q.Colors) == 0 && + 
len(q.AgeCategories) == 0 && len(q.Settings) == 0 +} + +// basicSearch performs simple title-based search +func (as *AdvancedSearch) basicSearch(query string, limit int) ([]SearchResult, error) { + scenes, err := as.sceneStore.Search(query) + if err != nil { + return nil, err + } + + results := make([]SearchResult, len(scenes)) + for i, scene := range scenes { + results[i] = SearchResult{ + Scene: scene, + Score: as.calculateTitleScore(scene.Title, query), + MatchInfo: MatchInfo{ + Confidence: 0.5, + }, + } + } + + return results, nil +} + +// advancedSearch performs complex tag-based search +func (as *AdvancedSearch) advancedSearch(q *SearchQuery, limit int) ([]SearchResult, error) { + var results []SearchResult + + // Search by performer names first + if len(q.Performers) > 0 { + performerResults, err := as.searchByPerformers(q.Performers, limit) + if err != nil { + return nil, err + } + results = append(results, performerResults...) + } + + // Search by tags (actions, clothing, colors, etc.) + tagResults, err := as.searchByTags(q, limit) + if err != nil { + return nil, err + } + results = append(results, tagResults...) 
+ + // Remove duplicates and sort by score + results = as.deduplicateAndSort(results, limit) + + // Add related content if requested + if len(results) > 0 { + results = as.addRelatedContent(results) + } + + return results, nil +} + +// searchByPerformers finds scenes with specific performers +func (as *AdvancedSearch) searchByPerformers(performerNames []string, limit int) ([]SearchResult, error) { + var results []SearchResult + + for _, name := range performerNames { + performers, err := as.performerStore.Search(name) + if err != nil { + continue + } + + for _, performer := range performers { + scenes, err := as.getScenesByPerformer(performer.ID) + if err != nil { + continue + } + + for _, scene := range scenes { + score := 1.0 // Perfect match for performer + if !strings.Contains(strings.ToLower(scene.Title), strings.ToLower(name)) { + score = 0.8 // Scene exists but name not in title + } + + results = append(results, SearchResult{ + Scene: scene, + Score: score, + MatchInfo: MatchInfo{ + PerformerMatch: []string{name}, + Confidence: score, + }, + }) + } + } + } + + return results, nil +} + +// searchByTags finds scenes matching various tag categories +func (as *AdvancedSearch) searchByTags(q *SearchQuery, limit int) ([]SearchResult, error) { + // Build complex SQL query for tag matching + whereClauses := []string{} + args := []interface{}{} + + // Add clothing color tags + for _, color := range q.Colors { + whereClauses = append(whereClauses, "t.name LIKE ?") + args = append(args, "%"+color+"%") + } + + // Add clothing type tags + for _, clothing := range q.Clothing { + whereClauses = append(whereClauses, "t.name LIKE ?") + args = append(args, "%"+clothing+"%") + } + + // Add action tags + for _, action := range q.Actions { + whereClauses = append(whereClauses, "t.name LIKE ?") + args = append(args, "%"+action+"%") + } + + // Add age category tags + for _, age := range q.AgeCategories { + whereClauses = append(whereClauses, "t.name LIKE ?") + args = append(args, 
"%"+age+"%") + } + + // Add setting tags + for _, setting := range q.Settings { + whereClauses = append(whereClauses, "t.name LIKE ?") + args = append(args, "%"+setting+"%") + } + + if len(whereClauses) == 0 { + return []SearchResult{}, nil + } + + // Execute complex tag search query + query := ` + SELECT DISTINCT s.*, COUNT(st.tag_id) as match_count, AVG(st.confidence) as avg_confidence + FROM scenes s + INNER JOIN scene_tags st ON s.id = st.scene_id + INNER JOIN tags t ON st.tag_id = t.id + WHERE ` + strings.Join(whereClauses, " OR ") + ` + GROUP BY s.id + ORDER BY match_count DESC, avg_confidence DESC + LIMIT ? + ` + + args = append(args, limit*2) // Get more for deduplication + + rows, err := as.db.Conn().Query(query, args...) + if err != nil { + return nil, fmt.Errorf("tag search failed: %w", err) + } + defer rows.Close() + + return as.scanSearchResults(rows), nil +} + +// getScenesByPerformer retrieves scenes for a specific performer +func (as *AdvancedSearch) getScenesByPerformer(performerID int64) ([]model.Scene, error) { + rows, err := as.db.Conn().Query(` + SELECT s.id, s.title, COALESCE(s.code, ''), COALESCE(s.date, ''), + COALESCE(s.studio_id, 0), COALESCE(s.description, ''), + COALESCE(s.image_path, ''), COALESCE(s.image_url, ''), + COALESCE(s.director, ''), COALESCE(s.url, ''), + COALESCE(s.source, ''), COALESCE(s.source_id, ''), + s.created_at, s.updated_at + FROM scenes s + INNER JOIN scene_performers sp ON s.id = sp.scene_id + WHERE sp.performer_id = ? 
+ ORDER BY s.date DESC, s.title + `, performerID) + + if err != nil { + return nil, err + } + defer rows.Close() + + return as.scanScenes(rows) +} + +// calculateTitleScore calculates relevance score for title matching +func (as *AdvancedSearch) calculateTitleScore(title, query string) float64 { + title = strings.ToLower(title) + query = strings.ToLower(query) + + // Exact match + if title == query { + return 1.0 + } + + // Title contains query + if strings.Contains(title, query) { + return 0.8 + } + + // Query contains title + if strings.Contains(query, title) { + return 0.6 + } + + // Word overlap + titleWords := strings.Fields(title) + queryWords := strings.Fields(query) + matches := 0 + + for _, qWord := range queryWords { + for _, tWord := range titleWords { + if qWord == tWord { + matches++ + break + } + } + } + + if len(queryWords) == 0 { + return 0.0 + } + + return float64(matches) / float64(len(queryWords)) * 0.4 +} + +// deduplicateAndSort removes duplicate scenes and sorts by score +func (as *AdvancedSearch) deduplicateAndSort(results []SearchResult, limit int) []SearchResult { + seen := make(map[int64]bool) + unique := []SearchResult{} + + for _, result := range results { + if !seen[result.Scene.ID] { + seen[result.Scene.ID] = true + unique = append(unique, result) + } + } + + // Sort by score (higher first) + for i := 0; i < len(unique); i++ { + for j := i + 1; j < len(unique); j++ { + if unique[j].Score > unique[i].Score { + unique[i], unique[j] = unique[j], unique[i] + } + } + } + + if len(unique) > limit { + unique = unique[:limit] + } + + return unique +} + +// addRelatedContent adds related scenes to search results +func (as *AdvancedSearch) addRelatedContent(results []SearchResult) []SearchResult { + if len(results) == 0 { + return results + } + + // For now, add scenes from same studio or performers + baseScene := results[0].Scene + related, err := as.findRelatedScenes(baseScene.ID, *baseScene.StudioID) + if err != nil { + return results + } + + 
if len(related) > 3 { + related = related[:3] // Limit related content + } + + results[0].Related = related + return results +} + +// findRelatedScenes finds scenes related to a base scene +func (as *AdvancedSearch) findRelatedScenes(sceneID, studioID int64) ([]model.Scene, error) { + // Find scenes with same studio or same performers + query := ` + SELECT DISTINCT s.id, s.title, COALESCE(s.code, ''), COALESCE(s.date, ''), + COALESCE(s.studio_id, 0), COALESCE(s.description, ''), + COALESCE(s.image_path, ''), COALESCE(s.image_url, ''), + COALESCE(s.director, ''), COALESCE(s.url, ''), + COALESCE(s.source, ''), COALESCE(s.source_id, ''), + s.created_at, s.updated_at + FROM scenes s + WHERE (s.studio_id = ? OR s.id IN ( + SELECT sp2.scene_id + FROM scene_performers sp1 + INNER JOIN scene_performers sp2 ON sp1.performer_id = sp2.performer_id + WHERE sp1.scene_id = ? AND sp2.scene_id != ? + )) AND s.id != ? + ORDER BY s.date DESC + LIMIT 10 + ` + + rows, err := as.db.Conn().Query(query, studioID, sceneID, sceneID, sceneID) + if err != nil { + return nil, err + } + defer rows.Close() + + return as.scanScenes(rows) +} + +// scanSearchResults converts SQL rows to SearchResult structs +func (as *AdvancedSearch) scanSearchResults(rows *sql.Rows) []SearchResult { + var results []SearchResult + + for rows.Next() { + var scene model.Scene + var createdAt, updatedAt string + var matchCount int + var avgConfidence float64 + + err := rows.Scan( + &scene.ID, &scene.Title, &scene.Code, &scene.Date, &scene.StudioID, + &scene.Description, &scene.ImagePath, &scene.ImageURL, &scene.Director, + &scene.URL, &scene.Source, &scene.SourceID, &createdAt, &updatedAt, + &matchCount, &avgConfidence, + ) + + if err != nil { + continue + } + + // Parse timestamps + if parsedTime, err := time.Parse("2006-01-02 15:04:05", createdAt); err == nil { + scene.CreatedAt = parsedTime + } + if parsedTime, err := time.Parse("2006-01-02 15:04:05", updatedAt); err == nil { + scene.UpdatedAt = parsedTime + } + + 
// Calculate composite score + score := math.Min(avgConfidence*0.7+float64(matchCount)*0.3, 1.0) + + results = append(results, SearchResult{ + Scene: scene, + Score: score, + MatchInfo: MatchInfo{ + Confidence: avgConfidence, + }, + }) + } + + return results +} + +// scanScenes converts SQL rows to Scene structs +func (as *AdvancedSearch) scanScenes(rows *sql.Rows) ([]model.Scene, error) { + var scenes []model.Scene + + for rows.Next() { + var scene model.Scene + var createdAt, updatedAt string + + err := rows.Scan( + &scene.ID, &scene.Title, &scene.Code, &scene.Date, &scene.StudioID, + &scene.Description, &scene.ImagePath, &scene.ImageURL, &scene.Director, + &scene.URL, &scene.Source, &scene.SourceID, &createdAt, &updatedAt, + ) + + if err != nil { + continue + } + + // Parse timestamps + if parsedTime, err := time.Parse("2006-01-02 15:04:05", createdAt); err == nil { + scene.CreatedAt = parsedTime + } + if parsedTime, err := time.Parse("2006-01-02 15:04:05", updatedAt); err == nil { + scene.UpdatedAt = parsedTime + } + + scenes = append(scenes, scene) + } + + return scenes, nil +} diff --git a/internal/search/parser.go b/internal/search/parser.go new file mode 100644 index 0000000..891522a --- /dev/null +++ b/internal/search/parser.go @@ -0,0 +1,200 @@ +package search + +import ( + "regexp" + "strings" +) + +// SearchQuery represents a parsed search query +type SearchQuery struct { + Original string + Performers []string + Actions []string + Clothing []string + Colors []string + BodyTypes []string + AgeCategories []string + Ethnicities []string + Settings []string + Positions []string + Production []string + Requirements []string // must-have terms + Preferences []string // nice-to-have terms +} + +// Parser handles natural language search query parsing +type Parser struct { + // Keyword mappings for different categories + actions map[string]bool + clothing map[string]bool + colors map[string]bool + bodyTypes map[string]bool + ageCategories map[string]bool + 
ethnicities map[string]bool + settings map[string]bool + positions map[string]bool + production map[string]bool +} + +// NewParser creates a new search query parser +func NewParser() *Parser { + p := &Parser{ + actions: make(map[string]bool), + clothing: make(map[string]bool), + colors: make(map[string]bool), + bodyTypes: make(map[string]bool), + ageCategories: make(map[string]bool), + ethnicities: make(map[string]bool), + settings: make(map[string]bool), + positions: make(map[string]bool), + production: make(map[string]bool), + } + + // Initialize keyword mappings + p.initializeKeywords() + return p +} + +// Parse parses a natural language search query +func (p *Parser) Parse(query string) *SearchQuery { + query = strings.ToLower(query) + query = strings.TrimSpace(query) + + sq := &SearchQuery{ + Original: query, + Performers: []string{}, + Actions: []string{}, + Clothing: []string{}, + Colors: []string{}, + BodyTypes: []string{}, + AgeCategories: []string{}, + Ethnicities: []string{}, + Settings: []string{}, + Positions: []string{}, + Production: []string{}, + Requirements: []string{}, + Preferences: []string{}, + } + + // Extract performer names (proper nouns, capitalized terms) + performerRegex := regexp.MustCompile(`\b([A-Z][a-z]+(?:\s+[A-Z][a-z]+)*)\b`) + matches := performerRegex.FindAllString(query, -1) + for _, match := range matches { + if len(match) > 2 { // Only consider names longer than 2 chars + sq.Performers = append(sq.Performers, match) + } + } + + // Extract age-specific terms + if strings.Contains(query, "teen") || strings.Contains(query, "teenage") { + sq.AgeCategories = append(sq.AgeCategories, "teen") + } + if strings.Contains(query, "milf") { + sq.AgeCategories = append(sq.AgeCategories, "milf") + } + if strings.Contains(query, "mature") { + sq.AgeCategories = append(sq.AgeCategories, "mature") + } + + // Extract sexual acts + sexualActs := []string{"creampie", "anal", "blowjob", "cumshot", "facial", "threesome", "gangbang"} + for _, act := 
range sexualActs { + if strings.Contains(query, act) { + sq.Actions = append(sq.Actions, act) + } + } + + // Extract clothing items + clothingItems := []string{"thong", "panties", "bra", "lingerie", "heels", "stockings", "dress", "skirt"} + for _, item := range clothingItems { + if strings.Contains(query, item) { + sq.Clothing = append(sq.Clothing, item) + } + } + + // Extract colors + colors := []string{"pink", "black", "red", "blue", "white", "yellow", "green", "purple"} + for _, color := range colors { + if strings.Contains(query, color) { + sq.Colors = append(sq.Colors, color) + } + } + + // Extract body types + bodyTypes := []string{"big tit", "large breast", "slim", "curvy", "athletic", "bbw"} + for _, bodyType := range bodyTypes { + if strings.Contains(query, bodyType) { + sq.BodyTypes = append(sq.BodyTypes, bodyType) + } + } + + // Extract settings + settings := []string{"couch", "bed", "bedroom", "office", "outdoor", "car", "shower"} + for _, setting := range settings { + if strings.Contains(query, setting) { + sq.Settings = append(sq.Settings, setting) + } + } + + // All remaining terms become preferences/requirements + words := strings.Fields(query) + for _, word := range words { + if len(word) > 2 && !p.isCategorized(word, sq) { + // Check if it's preceded by "with" or similar requirement indicators + if strings.Contains(query, "with "+word) || strings.Contains(query, "has "+word) { + sq.Requirements = append(sq.Requirements, word) + } else { + sq.Preferences = append(sq.Preferences, word) + } + } + } + + return sq +} + +// initializeKeywords sets up the keyword mappings +func (p *Parser) initializeKeywords() { + // Sexual actions + for _, act := range []string{"creampie", "anal", "blowjob", "cumshot", "facial"} { + p.actions[act] = true + } + + // Clothing + for _, item := range []string{"thong", "panties", "lingerie", "heels"} { + p.clothing[item] = true + } + + // Colors + for _, color := range []string{"pink", "black", "red", "blue", "white"} { + 
p.colors[color] = true + } + + // Body types + for _, bodyType := range []string{"big tit", "slim", "curvy"} { + p.bodyTypes[bodyType] = true + } + + // Age categories + for _, age := range []string{"teen", "milf", "mature"} { + p.ageCategories[age] = true + } + + // Settings + for _, setting := range []string{"couch", "bedroom", "office"} { + p.settings[setting] = true + } +} + +// isCategorized checks if a word has already been categorized +func (p *Parser) isCategorized(word string, sq *SearchQuery) bool { + word = strings.ToLower(word) + + for _, performer := range sq.Performers { + if strings.Contains(strings.ToLower(performer), word) { + return true + } + } + + return p.actions[word] || p.clothing[word] || p.colors[word] || + p.bodyTypes[word] || p.ageCategories[word] || p.settings[word] +} diff --git a/internal/web/server.go b/internal/web/server.go index a962488..5a45fed 100644 --- a/internal/web/server.go +++ b/internal/web/server.go @@ -18,8 +18,8 @@ import ( "git.leaktechnologies.dev/stu/Goondex/internal/db" import_service "git.leaktechnologies.dev/stu/Goondex/internal/import" "git.leaktechnologies.dev/stu/Goondex/internal/model" - "git.leaktechnologies.dev/stu/Goondex/internal/scraper/adultemp" - "git.leaktechnologies.dev/stu/Goondex/internal/scraper/tpdb" + "git.leaktechnologies.dev/stu/Goondex/internal/scraper" + "git.leaktechnologies.dev/stu/Goondex/internal/search" "git.leaktechnologies.dev/stu/Goondex/internal/sync" ) @@ -1113,28 +1113,30 @@ func (s *Server) handleAPIBulkImportPerformers(w http.ResponseWriter, r *http.Re w.Header().Set("Content-Type", "application/json") - apiKey, err := tpdbAPIKey() - if writeTPDBError(w, err) { - return - } - - scraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) - service := import_service.NewService(s.db, scraper) - if enricher, err := import_service.NewEnricher(s.db, 1*time.Second); err == nil { - service.WithEnricher(enricher) - } - - result, err := 
service.BulkImportAllPerformers(context.Background()) + // Try Adult Empire first (primary scraper for new imports) + bulkScraper, err := scraper.NewAdultEmpireBulkScraper() if err != nil { - json.NewEncoder(w).Encode(APIResponse{Success: false, Message: fmt.Sprintf("Import failed: %v", err)}) + // Fall back to TPDB if Adult Empire fails + apiKey, keyErr := tpdbAPIKey() + if writeTPDBError(w, keyErr) { + return + } + + tpdbScraper := tpdb.NewScraper("https://api.theporndb.net", apiKey) + service := import_service.NewService(s.db, tpdbScraper) + if enricher, enrichErr := import_service.NewEnricher(s.db, 1*time.Second); enrichErr == nil { + service.WithEnricher(enricher) + } + + result, err := service.BulkImportAllPerformers(context.Background()) + s.writeImportResult(w, result, err, "Performers") return } - json.NewEncoder(w).Encode(APIResponse{ - Success: true, - Message: fmt.Sprintf("Imported %d/%d performers", result.Imported, result.Total), - Data: result, - }) + // Use Adult Empire scraper + service := import_service.NewFlexibleService(s.db, bulkScraper) + result, err := service.BulkImportAllPerformersFlexible(context.Background()) + s.writeImportResult(w, result, err, "Performers") } func (s *Server) handleAPIBulkImportStudios(w http.ResponseWriter, r *http.Request) { @@ -1329,6 +1331,11 @@ func (s *Server) handleAPIBulkImportScenesProgress(w http.ResponseWriter, r *htt // ============================================================================ func (s *Server) handleAPIGlobalSearch(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + query := r.URL.Query().Get("q") if query == "" { json.NewEncoder(w).Encode(APIResponse{ @@ -1338,29 +1345,36 @@ func (s *Server) handleAPIGlobalSearch(w http.ResponseWriter, r *http.Request) { return } - performerStore := db.NewPerformerStore(s.db) - studioStore := db.NewStudioStore(s.db) - sceneStore := 
db.NewSceneStore(s.db) - tagStore := db.NewTagStore(s.db) + // Use advanced search for complex queries + advancedSearch := search.NewAdvancedSearch(s.db) + results, err := advancedSearch.Search(query, 20) + if err != nil { + json.NewEncoder(w).Encode(APIResponse{ + Success: false, + Message: fmt.Sprintf("Search failed: %v", err), + }) + return + } - performers, _ := performerStore.Search(query) - studios, _ := studioStore.Search(query) - scenes, _ := sceneStore.Search(query) - tags, _ := tagStore.Search(query) + // Convert to format expected by frontend + scenes := make([]model.Scene, len(results)) + for i, result := range results { + scenes[i] = result.Scene + } - results := map[string]interface{}{ - "performers": performers, - "studios": studios, - "scenes": scenes, - "tags": tags, - "total": len(performers) + len(studios) + len(scenes) + len(tags), + response := map[string]interface{}{ + "scenes": scenes, + "total": len(results), + "advanced": true, + "search_query": query, } json.NewEncoder(w).Encode(APIResponse{ Success: true, - Message: fmt.Sprintf("Found %d results", results["total"]), - Data: results, + Message: fmt.Sprintf("Found %d advanced results", len(results)), + Data: response, }) + } // ============================================================================ diff --git a/test-logo-standalone.html b/test-logo-standalone.html index b33e7c0..afd6890 100644 --- a/test-logo-standalone.html +++ b/test-logo-standalone.html @@ -6,34 +6,31 @@ body { background: #1a1a1a; color: white; padding: 2rem; font-family: Arial, sans-serif; } .logo { margin: 2rem 0; width: 180px; height: 110px; } .logo svg { width: 100%; height: 100%; display: block; } - .goondex-logo-animated .breast-left, - .goondex-logo-animated .breast-right { - animation: breastBounce 1.6s ease-in-out infinite; - transform-origin: center center; + .goondex-logo-animated { + animation: logoBounce 1.5s ease-in-out infinite; } - .goondex-logo-animated .breast-right { animation-delay: 0.08s; } 
.goondex-logo-animated .nipple-left, .goondex-logo-animated .nipple-right { - animation: nippleBob 1.6s ease-in-out infinite; - transform-origin: center center; + animation: nippleBounce 1.5s ease-in-out infinite; } - .goondex-logo-animated .nipple-right { animation-delay: 0.12s; } - - @keyframes breastBounce { - 0% { transform: translateY(0) scale(1); } - 12% { transform: translateY(-4px) scaleX(1.01) scaleY(0.985); } - 28% { transform: translateY(8px) scaleX(0.99) scaleY(1.03); } - 44% { transform: translateY(-3px) scaleX(1.012) scaleY(0.988); } - 60% { transform: translateY(4px) scaleX(0.995) scaleY(1.015); } - 100% { transform: translateY(0) scale(1); } + .goondex-logo-animated .nipple-right { + animation-delay: 0.1s; } - @keyframes nippleBob { - 0%, 100% { transform: translate(0, 0); } - 18% { transform: translate(0px, -5px) scale(1.03); } - 35% { transform: translate(0px, 6px) scale(0.98); } - 55% { transform: translate(0px, -3px) scale(1.02); } - 75% { transform: translate(0px, 2px); } + @keyframes logoBounce { + 0% { transform: translateY(0) scaleY(1); } + 15% { transform: translateY(-12px) scaleY(1.02); } + 30% { transform: translateY(0) scaleY(0.85); } + 40% { transform: translateY(3px) scaleY(1.05); } + 100% { transform: translateY(0) scaleY(1); } + } + + @keyframes nippleBounce { + 0%, 100% { transform: translateY(0); } + 15% { transform: translateY(-10px); } + 30% { transform: translateY(0); } + 40% { transform: translateY(2px); } + 100% { transform: translateY(0); } } button { background: #ff5fa2; color: white; border: none; padding: 0.5rem 1rem; border-radius: 4px; margin-right: 1rem; cursor: pointer; } @@ -73,12 +70,16 @@

Static Logo:

- +

Animated Logo:

- +
@@ -92,7 +93,9 @@ @@ -106,58 +109,26 @@ init(svgElement) { this.logoElement = svgElement; - this.identifyParts(); + this.identifyNipples(); } - - identifyParts() { if (!this.logoElement) return; - const nipples = []; - const breasts = []; - - // Prefer elements with ids/classes if present - const breastCandidates = [ - this.logoElement.querySelector('#breast-left'), - this.logoElement.querySelector('#breast-right') - ].filter(Boolean); - const nippleCandidates = [ - this.logoElement.querySelector('#nipple-left'), - this.logoElement.querySelector('#nipple-right') - ].filter(Boolean); - - breasts.push(...breastCandidates); - nipples.push(...nippleCandidates); - - // Fallback nipples: first two circles/ellipses - if (nipples.length < 2) { - const circ = Array.from(this.logoElement.querySelectorAll('circle, ellipse')); - while (nipples.length < 2 && circ.length) { - nipples.push(circ.shift()); + + const paths = this.logoElement.querySelectorAll('path'); + let nippleIndex = 0; + + paths.forEach((path) => { + const d = path.getAttribute('d'); + if (d && d.includes('1463.5643,67.636337')) { + path.classList.add('nipple-left'); + nippleIndex++; + } else if (d && d.includes('70.4489,0') && nippleIndex === 1) { + path.classList.add('nipple-right'); + nippleIndex++; } - } - - // Fallback breasts: first two paths/shapes - if (breasts.length < 2) { - const shapes = Array.from(this.logoElement.querySelectorAll('path, polygon, rect')); - while (breasts.length < 2 && shapes.length) { - breasts.push(shapes.shift()); - } - } - - // Ultimate fallback: animate whole svg as a single breast pair - if (breasts.length === 0) breasts.push(this.logoElement); - if (breasts.length === 1) breasts.push(this.logoElement); - - if (breasts[0]) breasts[0].classList.add('breast-left'); - if (breasts[1]) breasts[1].classList.add('breast-right'); - - if (nipples.length === 0) { - // If no explicit nipples, piggyback on breasts so some motion happens - nipples.push(breasts[0], breasts[1]); - } - 
nipples.slice(0, 2).forEach((el, idx) => { - if (el) el.classList.add(idx === 0 ? 'nipple-left' : 'nipple-right'); }); } + }); + } startBounce() { if (!this.logoElement || this.isAnimating) return; @@ -172,19 +143,7 @@ this.logoElement.classList.remove('goondex-logo-animated'); this.isAnimating = false; } - } - - // Inline-load the SVG so we can animate internals - // INLINE ANIMATOR (self-contained for this test page) - class LogoAnimator { - constructor() { - this.isAnimating = false; - this.logoElement = null; - } - - init(svgElement) { - this.logoElement = svgElement; - this.identifyParts(); +} } identifyParts() { @@ -259,24 +218,25 @@ "/static/img/logo/GOONDEX_Titty.svg", "static/img/logo/GOONDEX_Titty.svg", "./static/img/logo/GOONDEX_Titty.svg" - ]; + let animator = null; let loaderAnimator = null; - async function initLogos() { - const staticSvg = await loadSVG(logoURLs, 'static-logo'); - const animatedSvg = await loadSVG(logoURLs, 'animated-logo'); - const loaderSvg = await loadSVG(logoURLs, 'loader-logo'); - - if (animatedSvg) { +function initLogos() { + const animatedLogo = document.querySelector('#animated-logo img'); + const loaderLogo = document.querySelector('#loader-logo img'); + + if (animatedLogo) { animator = new LogoAnimator(); - animator.init(animatedSvg); - animator.startBounce(); + animator.init(animatedLogo); + console.log('Animator initialized'); } - if (loaderSvg) { + + if (loaderLogo) { loaderAnimator = new LogoAnimator(); - loaderAnimator.init(loaderSvg); + loaderAnimator.init(loaderLogo); + console.log('Loader animator initialized'); } }