merge work repo to main #1

Merged
partisan merged 4 commits from work into main 2024-06-10 10:03:44 +00:00
5 changed files with 99 additions and 157 deletions
Showing only changes of commit dae80c0684

run.sh (2 lines changed)

@@ -1,3 +1,3 @@
#!/bin/bash
go run main.go images.go imageproxy.go video.go map.go text.go text-searchxng.go text-librex.go text-google.go cache.go forums.go files.go files-torrentgalaxy.go files-thepiratebay.go agent.go --debug
go run main.go images.go imageproxy.go video.go map.go text.go text-searchxng.go text-librex.go text-google.go cache.go forums.go files.go files-torrentgalaxy.go files-thepiratebay.go agent.go

View file

@@ -11,9 +11,9 @@ import (
"github.com/PuerkitoBio/goquery"
)
func PerformDuckDuckGoTextSearch(query, safe, lang string) ([]TextSearchResult, error) {
func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
var results []TextSearchResult
searchURL := fmt.Sprintf("https://duckduckgo.com/html/?q=%s", url.QueryEscape(query))
searchURL := buildDuckDuckGoSearchURL(query, page)
resp, err := http.Get(searchURL)
if err != nil {
@@ -56,3 +56,11 @@ func PerformDuckDuckGoTextSearch(query, safe, lang string) ([]TextSearchResult,
return results, nil
}
func buildDuckDuckGoSearchURL(query string, page int) string {
startParam := ""
if page > 1 {
startParam = fmt.Sprintf("&s=%d", (page-1)*10)
}
return fmt.Sprintf("https://duckduckgo.com/html/?q=%s%s", url.QueryEscape(query), startParam)
}
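
Note: the new buildDuckDuckGoSearchURL paginates through DuckDuckGo's s offset parameter, 10 results per page. A quick sketch of the URLs it produces (query and page values here are just examples):

buildDuckDuckGoSearchURL("golang tutorial", 1)
// -> https://duckduckgo.com/html/?q=golang+tutorial        (page 1: no offset)
buildDuckDuckGoSearchURL("golang tutorial", 3)
// -> https://duckduckgo.com/html/?q=golang+tutorial&s=20   (page 3: offset (3-1)*10)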

View file

@@ -11,46 +11,27 @@ import (
"github.com/chromedp/chromedp"
)
// type TextSearchResult struct {
// URL string
// Header string
// Description string
// }
func PerformGoogleTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
opts := append(chromedp.DefaultExecAllocatorOptions[:],
chromedp.DisableGPU,
chromedp.NoDefaultBrowserCheck,
chromedp.NoFirstRun,
chromedp.Flag("disable-javascript", true),
)
ctx, cancel := chromedp.NewExecAllocator(context.Background(), opts...)
defer cancel()
// func main() {
// // Example usage
// results, err := PerformGoogleTextSearch("golang", "off", "lang_en", 2)
// if err != nil {
// log.Fatalf("Error performing search: %v", err)
// }
// for _, result := range results {
// fmt.Printf("URL: %s\nHeader: %s\nDescription: %s\n", result.URL, result.Header, result.Description)
// }
// }
func PerformGoogleTextSearch(query, safe, lang string, numPages int) ([]TextSearchResult, error) {
ctx, cancel := chromedp.NewContext(context.Background())
ctx, cancel = chromedp.NewContext(ctx)
defer cancel()
var results []TextSearchResult
searchURL := buildSearchURL(query, safe, lang, 1, 10)
err := chromedp.Run(ctx,
chromedp.Navigate(searchURL),
)
if err != nil {
return nil, fmt.Errorf("failed to navigate to search URL: %v", err)
}
for page := 1; page <= numPages; page++ {
searchURL := buildSearchURL(query, safe, lang, page, 10)
var pageSource string
err := chromedp.Run(ctx,
chromedp.Navigate(searchURL),
chromedp.Sleep(2*time.Second),
chromedp.OuterHTML("html", &pageSource),
chromedp.Evaluate(`window.scrollTo(0, document.body.scrollHeight);`, nil),
)
if err != nil {
return nil, fmt.Errorf("failed to retrieve page source: %v", err)
@@ -61,7 +42,6 @@ func PerformGoogleTextSearch(query, safe, lang string, numPages int) ([]TextSear
return nil, fmt.Errorf("error parsing results: %v", err)
}
results = append(results, newResults...)
}
return results, nil
}
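
For context, the rewritten PerformGoogleTextSearch now drives one headless-browser session through every requested page instead of doing a single navigation. A minimal, self-contained chromedp sketch of that per-page loop (a standalone example, not the merged code; it assumes Chrome is installed locally and skips the project's parseResults step):

package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/chromedp/chromedp"
)

func main() {
	ctx, cancel := chromedp.NewContext(context.Background())
	defer cancel()

	for page := 1; page <= 2; page++ {
		searchURL := fmt.Sprintf("https://www.google.com/search?q=golang&start=%d", (page-1)*10)
		var pageSource string
		err := chromedp.Run(ctx,
			chromedp.Navigate(searchURL),
			chromedp.Sleep(2*time.Second),           // crude wait for results to render
			chromedp.OuterHTML("html", &pageSource), // grab the full page source
		)
		if err != nil {
			log.Fatalf("page %d: %v", page, err)
		}
		fmt.Printf("page %d: fetched %d bytes of HTML\n", page, len(pageSource))
	}
}

The fixed 2-second Sleep mirrors what the diff does before reading OuterHTML; waiting on a result selector would be more robust, but that is outside the scope of this change.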
@@ -77,7 +57,9 @@ func buildSearchURL(query, safe, lang string, page, resultsPerPage int) string {
langParam = "&lr=" + lang
}
return fmt.Sprintf("https://www.google.com/search?q=%s%s%s", url.QueryEscape(query), safeParam, langParam)
startParam := fmt.Sprintf("&start=%d", (page-1)*resultsPerPage)
return fmt.Sprintf("https://www.google.com/search?q=%s%s%s%s", url.QueryEscape(query), safeParam, langParam, startParam)
}
func parseResults(pageSource string) ([]TextSearchResult, error) {
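
With the added startParam, buildSearchURL now encodes the page offset the same way. For example, assuming safeParam ends up empty (its construction sits above this hunk and is not shown):

buildSearchURL("golang", "", "lang_en", 3, 10)
// -> https://www.google.com/search?q=golang&lr=lang_en&start=20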

View file

@@ -20,7 +20,7 @@ type LibreXResponse []LibreXResult
func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
// LibreX uses page starting from 0
searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page-1)
searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page)
// User Agent generation
userAgent, err := GetUserAgent("librex-text-search")
@@ -63,10 +63,6 @@ func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchRe
Source: "LibreX",
}
if debugMode {
log.Printf("LibreX result: %+v\n", result)
}
results = append(results, result)
}
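
With this change the 1-based page number is passed straight through as LibreX's p parameter, so a page-2 request for "golang tutorial" becomes the following URL (the domain is a placeholder; LIBREX_DOMAIN is defined elsewhere in the repo):

https://librex.example.org/api.php?q=golang+tutorial&p=2&t=0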

text.go (182 lines changed)

@@ -1,44 +1,59 @@
package main
import (
"flag"
"fmt"
"html/template"
"log"
"math/rand"
"net/http"
"sort"
"sync"
"time"
)
var (
debugMode bool
searchEngines []searchEngine
searchEngineLock sync.Mutex
)
type searchEngine struct {
Name string
Func func(string, string, string, int) ([]TextSearchResult, error)
Weight int
}
func init() {
flag.BoolVar(&debugMode, "debug", false, "enable debug mode")
flag.Parse()
debugMode = false
searchEngines = []searchEngine{
{Name: "Google", Func: PerformGoogleTextSearch, Weight: 1},
{Name: "LibreX", Func: PerformLibreXTextSearch, Weight: 2},
// {Name: "DuckDuckGo", Func: PerformDuckDuckGoTextSearch, Weight: 3}, // DuckDuckGo timeouts too fast and search results are trash
// {Name: "SearXNG", Func: PerformSearXNGTextSearch, Weight: 2}, // Uncomment when implemented
}
rand.Seed(time.Now().UnixNano())
}
func HandleTextSearch(w http.ResponseWriter, query, safe, lang string, page int) {
startTime := time.Now()
const resultsPerPage = 10
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, safe, lang, page, resultsPerPage)
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, safe, lang, page)
hasPrevPage := page > 1
hasNextPage := len(combinedResults) == resultsPerPage
hasNextPage := len(combinedResults) > 0
displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
// Always check and cache the next page if not enough results
if hasNextPage {
go cacheNextPageIfNotCached(query, safe, lang, page+1, resultsPerPage)
// Prefetch next and previous pages
go prefetchPage(query, safe, lang, page+1)
if hasPrevPage {
go prefetchPage(query, safe, lang, page-1)
}
}
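
HandleTextSearch keeps its existing signature, so wiring it into a route is unchanged. An illustrative registration, assuming "net/http" and "strconv" are imported (the path and query-parameter names here are assumptions, not taken from this repo):

func registerTextSearch() {
	http.HandleFunc("/search", func(w http.ResponseWriter, r *http.Request) {
		q := r.URL.Query()
		page, err := strconv.Atoi(q.Get("p"))
		if err != nil || page < 1 {
			page = 1 // fall back to the first page on a missing or invalid parameter
		}
		// Serves page N; the goroutines above prefetch N+1 (and N-1 when it exists).
		HandleTextSearch(w, q.Get("q"), q.Get("safe"), q.Get("lang"), page)
	})
}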
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page, resultsPerPage int) []TextSearchResult {
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TextSearchResult {
cacheChan := make(chan []SearchResult)
var combinedResults []TextSearchResult
@@ -56,7 +71,7 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
select {
case results := <-cacheChan:
if results == nil {
combinedResults = fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
combinedResults = fetchTextResults(query, safe, lang, page)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
} else {
textResults, _, _ := convertToSpecificResults(results)
@@ -64,129 +79,70 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
}
case <-time.After(2 * time.Second):
log.Println("Cache check timeout")
combinedResults = fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
combinedResults = fetchTextResults(query, safe, lang, page)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
return combinedResults
}
func cacheNextPageIfNotCached(query, safe, lang string, page, resultsPerPage int) {
func prefetchPage(query, safe, lang string, page int) {
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
if _, exists := resultsCache.Get(cacheKey); !exists {
log.Printf("Next page %d not cached, caching now...", page)
nextPageResults := fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
resultsCache.Set(cacheKey, convertToSearchResults(nextPageResults))
log.Printf("Page %d not cached, caching now...", page)
pageResults := fetchTextResults(query, safe, lang, page)
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
} else {
log.Printf("Next page %d already cached", page)
log.Printf("Page %d already cached", page)
}
}
func fetchTextResultsUntilFull(query, safe, lang string, targetPage, resultsPerPage int) []TextSearchResult {
var combinedResults []TextSearchResult
currentPage := 1
resultsNeeded := targetPage * resultsPerPage
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
engine := selectSearchEngine()
log.Printf("Using search engine: %s", engine.Name)
for len(combinedResults) < resultsNeeded {
cacheKey := CacheKey{Query: query, Page: targetPage, Safe: safe == "true", Lang: lang, Type: "text"}
cachedResults, exists := resultsCache.Get(cacheKey)
if exists {
textResults, _, _ := convertToSpecificResults(cachedResults)
combinedResults = append(combinedResults, textResults...)
results, err := engine.Func(query, safe, lang, page)
if err != nil {
log.Printf("Error performing search with %s: %v", engine.Name, err)
return nil
}
return results
}
func selectSearchEngine() searchEngine {
searchEngineLock.Lock()
defer searchEngineLock.Unlock()
totalWeight := 0
for _, engine := range searchEngines {
totalWeight += engine.Weight
}
randValue := rand.Intn(totalWeight)
for _, engine := range searchEngines {
if randValue < engine.Weight {
// Adjust weights for load balancing
for i := range searchEngines {
if searchEngines[i].Name == engine.Name {
searchEngines[i].Weight = max(1, searchEngines[i].Weight-1)
} else {
results := fetchAndCacheTextResults(query, safe, lang, currentPage, resultsPerPage)
if len(results) == 0 {
break
}
combinedResults = append(combinedResults, results...)
resultsCache.Set(cacheKey, convertToSearchResults(results))
}
currentPage++
// Stop fetching if we have enough results for the target page and the next page
if len(combinedResults) >= resultsNeeded+resultsPerPage {
break
searchEngines[i].Weight++
}
}
startIndex := (targetPage - 1) * resultsPerPage
endIndex := startIndex + resultsPerPage
if startIndex >= len(combinedResults) {
return []TextSearchResult{}
return engine
}
if endIndex > len(combinedResults) {
endIndex = len(combinedResults)
randValue -= engine.Weight
}
return combinedResults[startIndex:endIndex]
return searchEngines[0] // fallback to the first engine
}
func fetchAndCacheTextResults(query, safe, lang string, page, resultsPerPage int) []TextSearchResult {
var combinedResults []TextSearchResult
var wg sync.WaitGroup
var mu sync.Mutex
resultsChan := make(chan []TextSearchResult)
searchFuncs := []struct {
Func func(string, string, string, int) ([]TextSearchResult, error)
Source string
}{
{PerformGoogleTextSearch, "Google"},
// {PerformLibreXTextSearch, "LibreX"},
// {PerformSearXNGTextSearch, "SearXNG"},
}
wg.Add(len(searchFuncs))
for _, searchFunc := range searchFuncs {
go func(searchFunc func(string, string, string, int) ([]TextSearchResult, error), source string) {
defer wg.Done()
results, err := searchFunc(query, safe, lang, page)
if err == nil {
for i := range results {
results[i].Source = source
}
resultsChan <- results
} else {
log.Printf("Error performing search from %s: %v", source, err)
}
}(searchFunc.Func, searchFunc.Source)
}
go func() {
wg.Wait()
close(resultsChan)
}()
for results := range resultsChan {
mu.Lock()
combinedResults = append(combinedResults, results...)
mu.Unlock()
}
sort.SliceStable(combinedResults, func(i, j int) bool {
return sourceOrder(combinedResults[i].Source) < sourceOrder(combinedResults[j].Source)
})
log.Printf("Fetched %d results for page %d", len(combinedResults), page)
return combinedResults
}
func sourceOrder(source string) int {
switch source {
case "Google":
return 1
case "LibreX":
return 2
case "SearchXNG":
return 3
default:
return 4
func max(a, b int) int {
if a > b {
return a
}
return b
}
func displayResults(w http.ResponseWriter, results []TextSearchResult, query, lang string, elapsed float64, page int, hasPrevPage, hasNextPage bool) {
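
The last hunk interleaves the removed fetchTextResultsUntilFull/fetchAndCacheTextResults code with the new engine-selection code and carries no add/remove markers, so the new logic is hard to read in place. Pieced together from the added lines, selectSearchEngine is a weighted random pick that rebalances itself after every call (a reconstruction for readability; the merged text.go is authoritative):

func selectSearchEngine() searchEngine {
	searchEngineLock.Lock()
	defer searchEngineLock.Unlock()

	// Sum the weights and draw a random value in [0, totalWeight).
	totalWeight := 0
	for _, engine := range searchEngines {
		totalWeight += engine.Weight
	}
	randValue := rand.Intn(totalWeight)

	// Walk the engines; the one whose weight bucket contains randValue is picked.
	for _, engine := range searchEngines {
		if randValue < engine.Weight {
			// Load balancing: the chosen engine gets lighter (never below 1),
			// every other engine gets heavier, so picks rotate over time.
			for i := range searchEngines {
				if searchEngines[i].Name == engine.Name {
					searchEngines[i].Weight = max(1, searchEngines[i].Weight-1)
				} else {
					searchEngines[i].Weight++
				}
			}
			return engine
		}
		randValue -= engine.Weight
	}
	return searchEngines[0] // fallback to the first engine
}

With the initial weights from init() (Google 1, LibreX 2), Google is drawn roughly one call in three at first; each pick then lightens the chosen engine and bumps the others, so traffic rotates rather than sticking to a single engine. The small max helper added at the bottom of the file only exists to keep a weight from dropping below 1 (Go 1.21's built-in max would do the same).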