// search-engine.go — weighted, reputation-based selection among search backends.
package main
import (
	"encoding/json"
	"fmt"
	"log"
	"math/rand"
	"net/http"
	"net/url"
	"strconv"
	"sync"
	"time"
)
var (
searchEngineLock sync.Mutex
2024-06-29 19:27:48 +00:00
searchEngines []SearchEngine // Ensure this variable is defined
)
// SearchEngine struct now includes metrics for calculating reputation.
type SearchEngine struct {
Name string
Func func(string, string, string, int) ([]SearchResult, time.Duration, error)
Weight int
TotalRequests int
TotalTime time.Duration
SuccessfulSearches int
FailedSearches int
2024-06-29 19:27:48 +00:00
IsCrawler bool // Indicates if this search engine is a crawler
Host string // Host of the crawler
Port int // Port of the crawler
AuthCode string // Auth code for the crawler
}
// init function seeds the random number generator.
func init() {
rand.Seed(time.Now().UnixNano())
2024-06-29 19:27:48 +00:00
// Initialize the searchEngines list
searchEngines = []SearchEngine{
{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch), Weight: 1},
{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch), Weight: 2},
{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch), Weight: 2},
{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch), Weight: 5},
// {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
}
}
// selectSearchEngine picks an engine by weighted random selection, first
// refreshing each engine's weight from its current metrics. It is safe for
// concurrent use. The first engine is returned as a fallback when the
// weighted draw fails to land (e.g. all weights zero).
func selectSearchEngine(engines []SearchEngine) SearchEngine {
	searchEngineLock.Lock()
	defer searchEngineLock.Unlock()

	if len(engines) == 0 {
		return SearchEngine{} // nothing to select from
	}

	// Recalculate weights based on average response time and success rate.
	for i := range engines {
		engines[i].Weight = calculateReputation(engines[i])
	}

	totalWeight := 0
	for _, engine := range engines {
		totalWeight += engine.Weight
	}
	// rand.Intn panics for n <= 0; fall back rather than crash when every
	// engine's weight has collapsed to zero.
	if totalWeight <= 0 {
		return engines[0]
	}

	randValue := rand.Intn(totalWeight)
	for _, engine := range engines {
		if randValue < engine.Weight {
			return engine
		}
		randValue -= engine.Weight
	}
	return engines[0] // fallback to the first engine
}
// updateEngineMetrics records the outcome of a single search against the
// given engine and refreshes its weight from the updated totals. It is safe
// for concurrent use.
func updateEngineMetrics(engine *SearchEngine, responseTime time.Duration, success bool) {
	searchEngineLock.Lock()
	defer searchEngineLock.Unlock()

	engine.TotalRequests++
	engine.TotalTime += responseTime
	if !success {
		engine.FailedSearches++
	} else {
		engine.SuccessfulSearches++
	}
	engine.Weight = calculateReputation(*engine)
}
// calculateReputation derives a selection weight for engine from its average
// response time and success rate. Faster and more reliable engines score
// higher; the score is scaled by 10 for integer interpretability. The result
// is always >= 1 so an engine can never be permanently starved of traffic.
func calculateReputation(engine SearchEngine) int {
	const referenceTime = time.Second // 1 second reference time

	if engine.TotalRequests == 0 {
		return 10 // Default weight for new engines
	}

	// Average response time in seconds.
	avgResponseTime := engine.TotalTime.Seconds() / float64(engine.TotalRequests)
	// Guard against division by zero (all recorded responses were instant).
	if avgResponseTime <= 0 {
		avgResponseTime = referenceTime.Seconds()
	}

	// Fraction of requests that succeeded.
	successRate := float64(engine.SuccessfulSearches) / float64(engine.TotalRequests)

	// Combine response time and success rate into a single reputation score.
	// The formula can be adjusted to weigh the two factors differently.
	reputation := (referenceTime.Seconds() / avgResponseTime) * successRate

	// Scale for interpretability and clamp to a minimum of 1: a zero weight
	// would make the engine unselectable forever and could drive the total
	// weight in selectSearchEngine to zero.
	weight := int(reputation * 10)
	if weight < 1 {
		weight = 1
	}
	return weight
}
// fetchSearchResults selects an engine, runs the search through it (directly
// or via a remote crawler), records the outcome in the engine's metrics, and
// returns the results. It returns nil when the selected engine fails.
func fetchSearchResults(query, safe, lang, searchType string, page int) []SearchResult {
	engine := selectSearchEngine(searchEngines)
	log.Printf("Using search engine: %s", engine.Name)

	var (
		searchResults []SearchResult
		duration      time.Duration
		err           error
	)
	if engine.IsCrawler {
		searchResults, duration, err = fetchSearchFromCrawler(engine, query, safe, lang, searchType, page)
	} else {
		searchResults, duration, err = engine.Func(query, safe, lang, page)
	}

	// BUG FIX: selectSearchEngine returns the engine by value, so updating
	// the local copy would silently discard the metrics. Update the
	// canonical entry in searchEngines instead so weights actually adapt.
	for i := range searchEngines {
		if searchEngines[i].Name == engine.Name {
			updateEngineMetrics(&searchEngines[i], duration, err == nil)
			break
		}
	}

	if err != nil {
		if engine.IsCrawler {
			log.Printf("Error performing search with crawler %s: %v", engine.Name, err)
		} else {
			log.Printf("Error performing search with %s: %v", engine.Name, err)
		}
		return nil
	}
	return searchResults
}
// fetchSearchFromCrawler forwards the search to a remote crawler engine over
// HTTP and decodes its JSON response. It returns the results, the elapsed
// request time (also populated on failure, so metrics stay meaningful), and
// any transport, status, or decode error.
func fetchSearchFromCrawler(engine SearchEngine, query, safe, lang, searchType string, page int) ([]SearchResult, time.Duration, error) {
	// Build the query string with proper escaping so special characters in
	// the user-supplied query cannot corrupt the URL.
	params := url.Values{}
	params.Set("q", query)
	params.Set("safe", safe)
	params.Set("lang", lang)
	params.Set("t", searchType)
	params.Set("p", strconv.Itoa(page))
	endpoint := fmt.Sprintf("http://%s:%d/search?%s", engine.Host, engine.Port, params.Encode())

	// Bounded client: the default http.Client has no timeout and can hang
	// the whole search path on an unresponsive crawler.
	client := &http.Client{Timeout: 10 * time.Second}

	start := time.Now()
	resp, err := client.Get(endpoint)
	if err != nil {
		return nil, time.Since(start), err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, time.Since(start), fmt.Errorf("crawler %s returned status %d", engine.Name, resp.StatusCode)
	}

	var results []SearchResult
	if err := json.NewDecoder(resp.Body).Decode(&results); err != nil {
		return nil, time.Since(start), err
	}
	return results, time.Since(start), nil
}