Compare commits

..

No commits in common. "d0c20bdc8f4bd49f709af3379a7bfc717fb77e3d" and "606cae2dc96d08750910fbc92d4c03dae4c54a1c" have entirely different histories.

30 changed files with 415 additions and 1028 deletions

2
.gitignore vendored
View file

@ -1,2 +0,0 @@
config.json
opensearch.xml

View file

@ -20,28 +20,15 @@
- [ ] Better name
- [ ] LXC container
- [ ] Docker container
- [ ] Automatic updates
- [ ] Scalable crawlers and webservers + load balancing
# Ocásek (Warp) Search Engine
# Go Search Engine
A self-hosted private and anonymous [metasearch engine](https://en.wikipedia.org/wiki/Metasearch_engine) that aims to be more resource efficient and scalable. Decentralized services are nice, but jumping between instances when one just stops working for some reason is inconvenient. That is why this engine can do both: you can self-host it or use the [official instance](https://search.spitfirebrowser.com/).
## Comparison to other search engines
| Name | Works without JS | Privacy frontend redirect | Torrent results | API | No 3rd party libs | Scalable | Not Resource Hungry | Dynamic Page Loading |
|------------|----------------------|---------------------------|-----------------|-----|-------------------|----------|---------------------------------------------|----------------------|
| Whoogle | ✅ | ❓ Only host can set it | ❌ | ❌ | ❌ | ❌ | ❓ Moderate | ❓ Not specified |
| Araa-Search| ✅ | ✅ | ✅ | ✅ | ❓ | ❌ | ❌ Very resource hungry | ❌ |
| LibreY | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | ❓ Moderate | ❌ |
| Ocásek | ✅ | ✅ | ✅ | ❌ | ✅ [1] | ✅ | ✅ about 20MiB at idle, 21MiB when searching| ✅ |
[1]: It does not rely on 3rd-party libs for web scraping like [Selenium](https://www.javatpoint.com/selenium-webdriver), but it uses other search instances like LibreX as a fallback.
A self-hosted [metasearch engine](https://en.wikipedia.org/wiki/Metasearch_engine) that respects privacy, contains no ads, and serves as a proxy/alternative to Google website.
## Features
- Text search using Google, Brave, DuckDuckGo and LibreX/Y search results.
- Image search using the Qwant/Imgur.
- Text search using Google search results.
- Image search using the Qwant API.
- Video search using Piped API.
- Image viewing using proxy and direct links to image source pages for image searches.
- Maps using OpenStreetMap
@ -52,8 +39,7 @@ A self-hosted private and anonymous [metasearch engine](https://en.wikipedia.org
### Prerequisites
- Go (version 1.18 or higher recommended)
- Git (unexpected)
- Access to the internet for fetching results (even more unexpected)
- Access to the internet for fetching results from the Qwant API and Google
### Running the Application
@ -62,6 +48,4 @@ git clone https://weforgecode.xyz/Spitfire/Search.git
cd Search
chmod +x ./run.sh
./run.sh
```
*It's that easy!*
```

View file

@ -1,18 +1,13 @@
// common_cache.go
package main
import (
"fmt"
"log"
"sync"
"time"
"github.com/shirou/gopsutil/mem"
)
var (
resultsCache = NewResultsCache(6 * time.Hour) // Cache with 6-hour expiration
maxMemoryUsage = 90.0 // Maximum memory usage in %
)
var resultsCache = NewResultsCache(6 * time.Hour) // Cache with 6-hour expiration
// SearchResult is a generic interface for all types of search results.
type SearchResult interface{}
@ -119,13 +114,9 @@ func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
func (rc *ResultsCache) Set(key CacheKey, results []SearchResult) {
rc.mu.Lock()
defer rc.mu.Unlock()
if _, exists := rc.results[rc.keyToString(key)]; !exists {
rc.results[rc.keyToString(key)] = CachedItem{
Results: results,
StoredTime: time.Now(),
}
go rc.checkAndCleanCache()
rc.results[rc.keyToString(key)] = CachedItem{
Results: results,
StoredTime: time.Now(),
}
}
@ -134,46 +125,7 @@ func (rc *ResultsCache) keyToString(key CacheKey) string {
return fmt.Sprintf("%s|%d|%t|%s|%s", key.Query, key.Page, key.Safe, key.Lang, key.Type)
}
// checkAndCleanCache evicts cached results once system-wide memory
// usage exceeds maxMemoryUsage. It is cheap when memory is below the
// threshold (a single gopsutil read), so it can run after every write.
func (rc *ResultsCache) checkAndCleanCache() {
	if rc.memoryUsage() > maxMemoryUsage {
		rc.cleanOldestItems()
	}
}
// memoryUsage reports system-wide memory utilisation as a percentage,
// read via gopsutil. On failure it logs and returns 0, which
// effectively disables cache cleaning for that check.
func (rc *ResultsCache) memoryUsage() float64 {
	v, err := mem.VirtualMemory()
	if err != nil {
		log.Printf("Failed to get memory info: %v", err)
		return 0
	}
	return v.UsedPercent
}
// cleanOldestItems deletes cache entries oldest-first until memory
// usage drops below maxMemoryUsage. The cache lock is held for the
// whole sweep; each iteration does a full O(n) scan to find the
// current oldest entry, so eviction is O(n) per removed item.
func (rc *ResultsCache) cleanOldestItems() {
	rc.mu.Lock()
	defer rc.mu.Unlock()
	for rc.memoryUsage() > maxMemoryUsage {
		var oldestKey string
		var oldestTime time.Time = time.Now()
		for key, item := range rc.results {
			if item.StoredTime.Before(oldestTime) {
				oldestTime = item.StoredTime
				oldestKey = key
			}
		}
		if oldestKey != "" {
			delete(rc.results, oldestKey)
			log.Printf("Removed oldest cache item: %s", oldestKey)
		} else {
			// Cache is empty (or nothing is older than "now"):
			// stop rather than spin while memory stays high.
			break
		}
	}
}
// Helper functions to convert between generic SearchResult and specific ImageSearchResult
func convertToSearchResults(results interface{}) []SearchResult {
switch res := results.(type) {
case []TextSearchResult:

View file

@ -1,17 +0,0 @@
package main
import (
"html/template"
)
var (
	// debugMode enables extra diagnostic output (e.g. printing
	// generated User-Agent strings elsewhere in the package).
	debugMode bool = true

	// funcs exposes integer "sub" and "add" helpers to the HTML
	// templates — presumably for pagination arithmetic; confirm
	// against the templates that use them.
	funcs = template.FuncMap{
		"sub": func(a, b int) int {
			return a - b
		},
		"add": func(a, b int) int {
			return a + b
		},
	}
)

View file

@ -128,10 +128,10 @@ func isInstanceValid(instance SearXInstance) bool {
}
}
// func main() {
// instance, err := getRandomSearXInstance()
// if err != nil {
// log.Fatalf("Failed to get a SearX instance: %v", err)
// }
// fmt.Printf("Selected SearX instance: %s\n", instance.URL)
// }
// main picks a random SearX instance and prints its URL, exiting
// fatally if none can be obtained.
// NOTE(review): package main already defines main elsewhere in this
// repository — this looks like a test driver that was previously
// commented out; confirm it should be active.
func main() {
	instance, err := getRandomSearXInstance()
	if err != nil {
		log.Fatalf("Failed to get a SearX instance: %v", err)
	}
	fmt.Printf("Selected SearX instance: %s\n", instance.URL)
}

18
go.mod
View file

@ -2,22 +2,8 @@ module searchengine
go 1.18
require github.com/PuerkitoBio/goquery v1.9.1 // direct
require (
github.com/PuerkitoBio/goquery v1.9.1 // direct
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/chromedp/cdproto v0.0.0-20240202021202-6d0b6a386732 // indirect
github.com/chromedp/chromedp v0.9.5 // indirect
github.com/chromedp/sysutil v1.0.0 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/gobwas/ws v1.3.2 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible
golang.org/x/net v0.21.0 // indirect
golang.org/x/sys v0.17.0 // indirect
golang.org/x/time v0.5.0 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
)
)

31
go.sum
View file

@ -2,31 +2,7 @@ github.com/PuerkitoBio/goquery v1.9.1 h1:mTL6XjbJTZdpfL+Gwl5U2h1l9yEkJjhmlTeV9VP
github.com/PuerkitoBio/goquery v1.9.1/go.mod h1:cW1n6TmIMDoORQU5IU/P1T3tGFunOeXEpGP2WHRwkbY=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/chromedp/cdproto v0.0.0-20240202021202-6d0b6a386732 h1:XYUCaZrW8ckGWlCRJKCSoh/iFwlpX316a8yY9IFEzv8=
github.com/chromedp/cdproto v0.0.0-20240202021202-6d0b6a386732/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs=
github.com/chromedp/chromedp v0.9.5 h1:viASzruPJOiThk7c5bueOUY91jGLJVximoEMGoH93rg=
github.com/chromedp/chromedp v0.9.5/go.mod h1:D4I2qONslauw/C7INoCir1BJkSwBYMyZgx8X276z3+Y=
github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic=
github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww=
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
github.com/gobwas/ws v1.3.2 h1:zlnbNHxumkRvfPWgfXu8RBwyNR1x8wh9cf5PTOCqs9Q=
github.com/gobwas/ws v1.3.2/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
@ -42,17 +18,12 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@ -62,8 +33,6 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=

View file

@ -1,149 +0,0 @@
package main
import (
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/PuerkitoBio/goquery"
)
// PerformImgurImageSearch performs an image search on Imgur and returns the results
// together with the elapsed wall-clock time. The safe and lang
// parameters are accepted for signature parity with the other image
// engines but are not used by this scraper.
// NOTE(review): each result triggers an extra HTTP fetch via
// scrapeImageFromImgurPage, so a full result page is slow.
func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchResult, time.Duration, error) {
	startTime := time.Now() // Start the timer
	var results []ImageSearchResult
	searchURL := buildImgurSearchURL(query, page)
	// NOTE(review): http.Get has no timeout — a stalled response from
	// Imgur blocks this search indefinitely.
	resp, err := http.Get(searchURL)
	if err != nil {
		return nil, 0, fmt.Errorf("making request: %v", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}
	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return nil, 0, fmt.Errorf("loading HTML document: %v", err)
	}
	// One result card per "post" node in Imgur's search grid.
	doc.Find("div.cards div.post").Each(func(i int, s *goquery.Selection) {
		thumbnailSrc, exists := s.Find("a img").Attr("src")
		// Very short src values are treated as placeholders, not
		// real thumbnails, and skipped.
		if !exists || len(thumbnailSrc) < 25 {
			return
		}
		// Dropping the "b" size suffix turns a thumbnail URL into the
		// full-size image URL.
		imgSrc := strings.Replace(thumbnailSrc, "b.", ".", 1)
		// Ensure the URLs have the correct protocol
		if !strings.HasPrefix(thumbnailSrc, "http") {
			thumbnailSrc = "https:" + thumbnailSrc
		}
		if !strings.HasPrefix(imgSrc, "http") {
			imgSrc = "https:" + imgSrc
		}
		urlPath, exists := s.Find("a").Attr("href")
		if !exists {
			return
		}
		// Scrape the image directly from the Imgur page
		imgSrc = scrapeImageFromImgurPage("https://imgur.com" + urlPath)
		// Remove any query parameters from the URL
		imgSrc = removeQueryParameters(imgSrc)
		title, _ := s.Find("a img").Attr("alt")
		width, _ := strconv.Atoi(s.Find("a img").AttrOr("width", "0"))
		height, _ := strconv.Atoi(s.Find("a img").AttrOr("height", "0"))
		results = append(results, ImageSearchResult{
			Thumbnail:  thumbnailSrc,
			Title:      strings.TrimSpace(title),
			Media:      imgSrc,
			Width:      width,
			Height:     height,
			Source:     "https://imgur.com" + urlPath,
			ThumbProxy: imgSrc, //"/img_proxy?url=" + url.QueryEscape(imgSrc)
		})
	})
	duration := time.Since(startTime) // Calculate the duration
	return results, duration, nil
}
// scrapeImageFromImgurPage scrapes the image source from the Imgur page
// by reading its og:image meta tag. It returns "" on any failure
// (network error, non-200 status, parse error, or missing tag);
// problems are printed to stdout rather than returned.
func scrapeImageFromImgurPage(pageURL string) string {
	resp, err := http.Get(pageURL)
	if err != nil {
		fmt.Printf("Error fetching page: %v\n", err)
		return ""
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		fmt.Printf("Unexpected status code: %d\n", resp.StatusCode)
		return ""
	}
	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		fmt.Printf("Error loading HTML document: %v\n", err)
		return ""
	}
	// Imgur exposes the canonical image URL in the og:image meta tag.
	imgSrc, exists := doc.Find("meta[property='og:image']").Attr("content")
	if !exists {
		fmt.Printf("Image not found on page: %s\n", pageURL)
		return ""
	}
	// Ensure the URL has the correct protocol
	if !strings.HasPrefix(imgSrc, "http") {
		imgSrc = "https:" + imgSrc
	}
	return imgSrc
}
// removeQueryParameters returns rawURL with its query string stripped,
// keeping scheme, host, path, and fragment intact. If the URL cannot
// be parsed, the input is returned unchanged after reporting the
// problem on stdout.
func removeQueryParameters(rawURL string) string {
	parsed, parseErr := url.Parse(rawURL)
	if parseErr != nil {
		fmt.Printf("Error parsing URL: %v\n", parseErr)
		return rawURL
	}
	parsed.RawQuery = ""
	return parsed.String()
}
// buildImgurSearchURL assembles the Imgur search URL for the given
// query and 1-based page number (Imgur itself counts pages from 0).
// "qs=thumbs" requests the thumbnail-grid result layout.
func buildImgurSearchURL(query string, page int) string {
	values := url.Values{
		"q":  {query},
		"qs": {"thumbs"},
		"p":  {strconv.Itoa(page - 1)},
	}
	return "https://imgur.com/search/score/all?" + values.Encode()
}
// func main() {
// results, duration, err := PerformImgurImageSearch("cats", "true", "en", 1)
// if err != nil {
// fmt.Println("Error:", err)
// return
// }
// fmt.Printf("Search took: %v\n", duration)
// for _, result := range results {
// fmt.Printf("Title: %s\nSource: %s\nMedia: %s\nThumbnail: %s\nThumbProxy: %s\nWidth: %d\nHeight: %d\n\n",
// result.Title, result.Source, result.Media, result.Thumbnail, result.ThumbProxy, result.Width, result.Height)
// }
// }

View file

@ -1,99 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"net/http"
"net/url"
"time"
)
// QwantAPIResponse represents the JSON response structure from Qwant API
// (v3 image search). Only the fields this program reads are declared;
// all other payload fields are silently ignored by encoding/json.
type QwantAPIResponse struct {
	Data struct {
		Result struct {
			Items []struct {
				Media     string `json:"media"`     // direct image URL
				Thumbnail string `json:"thumbnail"` // thumbnail URL
				Title     string `json:"title"`
				Url       string `json:"url"` // source page URL
				Width     int    `json:"width"`
				Height    int    `json:"height"`
			} `json:"items"`
		} `json:"result"`
	} `json:"data"`
}
// PerformQwantImageSearch performs an image search on Qwant and returns the results.
// page is 1-based and translated into an offset of 50 results per
// page. safe defaults to "0" (off) and lang to "en_CA" when empty.
// Returns the results, the elapsed duration, and an error.
func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchResult, time.Duration, error) {
	startTime := time.Now() // Start the timer
	const resultsPerPage = 50
	var offset int
	if page <= 1 {
		offset = 0
	} else {
		offset = (page - 1) * resultsPerPage
	}
	// Safe search off unless the caller specifies otherwise.
	if safe == "" {
		safe = "0"
	}
	// Qwant expects underscore locales such as "en_CA".
	if lang == "" {
		lang = "en_CA"
	}
	apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/images?t=images&q=%s&count=%d&locale=%s&offset=%d&device=desktop&tgp=2&safesearch=%s",
		url.QueryEscape(query),
		resultsPerPage,
		lang,
		offset,
		safe)
	client := &http.Client{Timeout: 10 * time.Second}
	req, err := http.NewRequest("GET", apiURL, nil)
	if err != nil {
		return nil, 0, fmt.Errorf("creating request: %v", err)
	}
	// A browser-like User-Agent is generated per request — presumably
	// because Qwant rejects non-browser agents; confirm in GetUserAgent.
	ImageUserAgent, err := GetUserAgent("Image-Search")
	if err != nil {
		return nil, 0, err
	}
	req.Header.Set("User-Agent", ImageUserAgent)
	resp, err := client.Do(req)
	if err != nil {
		return nil, 0, fmt.Errorf("making request: %v", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}
	var apiResp QwantAPIResponse
	if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
		return nil, 0, fmt.Errorf("decoding response: %v", err)
	}
	// Map the API items onto the engine-agnostic result type.
	var results []ImageSearchResult
	for _, item := range apiResp.Data.Result.Items {
		results = append(results, ImageSearchResult{
			Thumbnail:  item.Thumbnail,
			Title:      item.Title,
			Media:      item.Media,
			Source:     item.Url,
			ThumbProxy: "/img_proxy?url=" + url.QueryEscape(item.Media),
			Width:      item.Width,
			Height:     item.Height,
		})
	}
	duration := time.Since(startTime) // Calculate the duration
	return results, duration, nil
}

169
images.go
View file

@ -1,22 +1,120 @@
package main
import (
"encoding/json"
"fmt"
"html/template"
"log"
"net/http"
"net/url"
"time"
)
var imageSearchEngines []SearchEngine
func init() {
imageSearchEngines = []SearchEngine{
{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch), Weight: 1},
{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 2},
}
// QwantAPIResponse represents the JSON response structure from Qwant API
type QwantAPIResponse struct {
Data struct {
Result struct {
Items []struct {
Media string `json:"media"`
Thumbnail string `json:"thumbnail"`
Title string `json:"title"`
Url string `json:"url"`
Width int `json:"width"`
Height int `json:"height"`
} `json:"items"`
} `json:"result"`
} `json:"data"`
}
// funcs exposes integer "sub" and "add" helpers to the HTML templates
// — presumably for pagination arithmetic; confirm against the
// templates that reference them.
var funcs = template.FuncMap{
	"sub": func(a, b int) int {
		return a - b
	},
	"add": func(a, b int) int {
		return a + b
	},
}
// fetchImageResults contacts the Qwant v3 image API and returns a
// slice of ImageSearchResult. page is 1-based with 50 results per
// page; safe defaults to "0" (off) and lang to "en_CA" when empty.
func fetchImageResults(query string, safe, lang string, page int) ([]ImageSearchResult, error) {
	const resultsPerPage = 50
	var offset int
	if page <= 1 {
		offset = 0
	} else {
		offset = (page - 1) * resultsPerPage
	}
	// Ensuring safe search is disabled by default if not specified
	if safe == "" {
		safe = "0"
	}
	// Defaulting to English Canada locale if not specified
	if lang == "" {
		lang = "en_CA"
	}
	// Format &lang=lang_de is incorrect, implement fix !
	apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/images?t=images&q=%s&count=%d&locale=%s&offset=%d&device=desktop&tgp=2&safesearch=%s",
		url.QueryEscape(query),
		resultsPerPage,
		lang,
		offset,
		safe)
	client := &http.Client{Timeout: 10 * time.Second}
	req, err := http.NewRequest("GET", apiURL, nil)
	if err != nil {
		return nil, fmt.Errorf("creating request: %v", err)
	}
	// User Agent generation
	ImageUserAgent, err := GetUserAgent("Image-Search")
	if err != nil {
		fmt.Println("Error:", err)
		return nil, err
	}
	if debugMode {
		fmt.Println("Generated User Agent (images):", ImageUserAgent)
	}
	req.Header.Set("User-Agent", ImageUserAgent)
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("making request: %v", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}
	var apiResp QwantAPIResponse
	if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
		return nil, fmt.Errorf("decoding response: %v", err)
	}
	// Map API items onto the engine-agnostic result type.
	var results []ImageSearchResult
	for _, item := range apiResp.Data.Result.Items {
		results = append(results, ImageSearchResult{
			Thumbnail:  item.Thumbnail, // Thumbnail URL
			Title:      item.Title,     // Image title
			Media:      item.Media,     // Direct link to the image
			Source:     item.Url,
			ThumbProxy: "/img_proxy?url=" + url.QueryEscape(item.Media),
			Width:      item.Width,
			Height:     item.Height,
		})
	}
	return results, nil
}
// HandleImageSearch is the HTTP handler for image search requests
func handleImageSearch(w http.ResponseWriter, query, safe, lang string, page int) {
startTime := time.Now()
@ -76,66 +174,31 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
select {
case results := <-cacheChan:
if results == nil {
combinedResults = fetchImageResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
combinedResults = fetchAndCacheImageResults(query, safe, lang, page)
} else {
_, _, imageResults := convertToSpecificResults(results)
combinedResults = imageResults
}
case <-time.After(2 * time.Second):
log.Println("Cache check timeout")
combinedResults = fetchImageResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
combinedResults = fetchAndCacheImageResults(query, safe, lang, page)
}
return combinedResults
}
func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
var results []ImageSearchResult
for _, engine := range imageSearchEngines {
log.Printf("Using image search engine: %s", engine.Name)
searchResults, duration, err := engine.Func(query, safe, lang, page)
updateEngineMetrics(&engine, duration, err == nil)
if err != nil {
log.Printf("Error performing image search with %s: %v", engine.Name, err)
continue
}
for _, result := range searchResults {
results = append(results, result.(ImageSearchResult))
}
// If results are found, break out of the loop
if len(results) > 0 {
break
func fetchAndCacheImageResults(query, safe, lang string, page int) []ImageSearchResult {
results, err := fetchImageResults(query, safe, lang, page)
if err != nil || len(results) == 0 {
log.Printf("Error fetching image results: %v", err)
return []ImageSearchResult{
{Title: "Results are currently unavailable, sorry. Please try again later."},
}
}
// If no results found after trying all engines
if len(results) == 0 {
log.Printf("No image results found for query: %s", query)
}
// Cache the valid results
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "image"}
resultsCache.Set(cacheKey, convertToSearchResults(results))
return results
}
func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
imageResults, duration, err := f(query, safe, lang, page)
if err != nil {
return nil, duration, err
}
searchResults := make([]SearchResult, len(imageResults))
for i, result := range imageResults {
searchResults[i] = result
}
return searchResults, duration, nil
}
}

116
init.go
View file

@ -1,116 +0,0 @@
package main
import (
	"bufio"
	"encoding/json"
	"fmt"
	"log"
	"os"
	"strconv"
	"strings"
)
// Config holds the runtime settings persisted in config.json.
type Config struct {
	Port       int // HTTP listen port
	OpenSearch OpenSearchConfig
}

// OpenSearchConfig configures the OpenSearch descriptor generation.
type OpenSearchConfig struct {
	Domain string // public domain substituted into the search template URL
}

// Default configuration values used when the user accepts defaults.
var defaultConfig = Config{
	Port: 5000,
	OpenSearch: OpenSearchConfig{
		Domain: "localhost",
	},
}

// configFilePath is where the JSON configuration is read and written.
const configFilePath = "config.json"
// main makes sure a configuration file exists (interactively creating
// one if it is missing) and then starts the HTTP server.
func main() {
	// Run the initialization process
	err := initConfig()
	if err != nil {
		fmt.Println("Error during initialization:", err)
		return
	}

	// Start the main application
	runServer()
}
// initConfig ensures a configuration file is present at
// configFilePath, prompting the user to create one when it is missing.
func initConfig() error {
	_, statErr := os.Stat(configFilePath)
	if !os.IsNotExist(statErr) {
		fmt.Println("Configuration file already exists.")
		return nil
	}
	return createConfig()
}
// createConfig interactively creates a configuration file on
// stdin/stdout: the user may accept the defaults from defaultConfig or
// enter a port and domain. The result is persisted via saveConfig.
// Returns an error only for an invalid port number; stdin read errors
// are treated as empty input.
func createConfig() error {
	reader := bufio.NewReader(os.Stdin)
	fmt.Println("Configuration file not found.")
	fmt.Print("Do you want to use default values? (yes/no): ")
	useDefaults, _ := reader.ReadString('\n')

	config := defaultConfig

	// TrimSpace tolerates CRLF line endings and stray whitespace; the
	// original compared against "yes\n" exactly, which always failed
	// on Windows consoles.
	if strings.TrimSpace(useDefaults) != "yes" {
		fmt.Print("Enter port (default 5000): ")
		portStr, _ := reader.ReadString('\n')
		// The original sliced portStr[:len(portStr)-1], which panics
		// on an empty read (EOF); trimming is safe for any input.
		if portStr = strings.TrimSpace(portStr); portStr != "" {
			port, err := strconv.Atoi(portStr)
			if err != nil {
				return err
			}
			config.Port = port
		}

		fmt.Print("Enter your domain address (e.g., domain.com): ")
		domain, _ := reader.ReadString('\n')
		if domain = strings.TrimSpace(domain); domain != "" {
			config.OpenSearch.Domain = domain
		}
	}

	saveConfig(config)
	return nil
}
// saveConfig serializes config as indented JSON and writes it to
// configFilePath. Errors are reported on stdout rather than returned,
// matching the best-effort expectations of its callers.
func saveConfig(config Config) {
	// Marshal before touching the file: the original called os.Create
	// (which truncates) first, so a marshal failure left an empty
	// config file behind.
	configData, err := json.MarshalIndent(config, "", " ")
	if err != nil {
		fmt.Println("Error marshalling config data:", err)
		return
	}
	if err := os.WriteFile(configFilePath, configData, 0o644); err != nil {
		fmt.Println("Error writing to config file:", err)
	}
}
// loadConfig reads and decodes configFilePath into a Config value.
// Any failure is fatal: the server cannot run without its settings.
func loadConfig() Config {
	f, err := os.Open(configFilePath)
	if err != nil {
		log.Fatalf("Error opening config file: %v", err)
	}
	defer f.Close()

	var cfg Config
	if err := json.NewDecoder(f).Decode(&cfg); err != nil {
		log.Fatalf("Error decoding config file: %v", err)
	}
	return cfg
}

34
main.go
View file

@ -63,6 +63,19 @@ var languageOptions = []LanguageOption{
{Code: "lang_vi", Name: "Tiếng Việt (Vietnamese)"},
}
// main registers the HTTP routes (static assets, search, image proxy,
// settings page), initializes the torrent site list, and serves on a
// hard-coded port 5000.
func main() {
	http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
	http.HandleFunc("/", handleSearch)
	http.HandleFunc("/search", handleSearch)
	http.HandleFunc("/img_proxy", handleImageProxy)
	http.HandleFunc("/settings", func(w http.ResponseWriter, r *http.Request) {
		http.ServeFile(w, r, "templates/settings.html")
	})
	initializeTorrentSites()
	fmt.Println("Server is listening on http://localhost:5000")
	log.Fatal(http.ListenAndServe(":5000", nil))
}
func handleSearch(w http.ResponseWriter, r *http.Request) {
query, safe, lang, searchType, page := parseSearchParams(r)
@ -120,24 +133,3 @@ func parsePageParameter(pageStr string) int {
}
return page
}
// runServer wires up all HTTP routes, loads the config, regenerates
// the OpenSearch descriptor, and serves on the configured port.
// log.Fatal makes any ListenAndServe failure terminate the process.
func runServer() {
	http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
	http.HandleFunc("/", handleSearch)
	http.HandleFunc("/search", handleSearch)
	http.HandleFunc("/img_proxy", handleImageProxy)
	http.HandleFunc("/settings", func(w http.ResponseWriter, r *http.Request) {
		http.ServeFile(w, r, "templates/settings.html")
	})
	// Serve the generated OpenSearch descriptor with its proper MIME
	// type so browsers offer the search-bar integration.
	http.HandleFunc("/opensearch.xml", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/opensearchdescription+xml")
		http.ServeFile(w, r, "static/opensearch.xml")
	})
	initializeTorrentSites()
	config := loadConfig()
	generateOpenSearchXML(config)
	fmt.Printf("Server is listening on http://localhost:%d\n", config.Port)
	log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", config.Port), nil))
}

View file

@ -1,50 +0,0 @@
package main
import (
"encoding/xml"
"fmt"
"os"
)
// OpenSearchDescription mirrors the OpenSearch 1.1 description XML
// document served to browsers for search-bar integration.
type OpenSearchDescription struct {
	XMLName     xml.Name `xml:"OpenSearchDescription"`
	Xmlns       string   `xml:"xmlns,attr"`
	ShortName   string   `xml:"ShortName"`
	Description string   `xml:"Description"`
	Tags        string   `xml:"Tags"`
	URL         URL      `xml:"Url"`
}

// URL is the Url element of the OpenSearch description: the template
// into which browsers substitute {searchTerms}.
type URL struct {
	Type     string `xml:"type,attr"`
	Template string `xml:"template,attr"`
}
// generateOpenSearchXML writes static/opensearch.xml describing this
// instance, using the domain from config. Failures are reported on
// stdout and the server continues without the descriptor.
func generateOpenSearchXML(config Config) {
	opensearch := OpenSearchDescription{
		Xmlns:       "http://a9.com/-/spec/opensearch/1.1/",
		ShortName:   "Ocásek",
		Description: "Search engine",
		Tags:        "search, engine",
		URL: URL{
			Type: "text/html",
			// NOTE(review): assumes the instance is reachable over
			// HTTPS — confirm for plain-HTTP deployments.
			Template: fmt.Sprintf("https://%s/search?q={searchTerms}", config.OpenSearch.Domain),
		},
	}

	file, err := os.Create("static/opensearch.xml")
	if err != nil {
		fmt.Println("Error creating OpenSearch file:", err)
		return
	}
	defer file.Close()

	enc := xml.NewEncoder(file)
	enc.Indent(" ", " ")
	if err := enc.Encode(opensearch); err != nil {
		fmt.Println("Error encoding OpenSearch XML:", err)
		return
	}

	fmt.Println("OpenSearch description file generated successfully.")
}

8
run.sh
View file

@ -1,7 +1,3 @@
#!/bin/sh
#!/bin/bash
# Find all .go files in the current directory
GO_FILES=$(find . -name '*.go' -print)
# Run the Go program
go run $GO_FILES
go run main.go images.go imageproxy.go video.go map.go text.go text-searchxng.go text-librex.go text-google.go cache.go forums.go files.go files-torrentgalaxy.go files-thepiratebay.go agent.go --debug

View file

@ -1,90 +0,0 @@
package main
import (
"math/rand"
"sync"
"time"
)
var (
searchEngineLock sync.Mutex
)
// SearchEngine describes one backend engine plus the live metrics used
// to compute its selection reputation. Weight is recomputed from the
// metrics on every selection and on every metrics update.
type SearchEngine struct {
	Name string
	// Func runs a search; arguments are presumably (query, safe,
	// lang, page), matching the Perform* engine signatures — confirm.
	Func               func(string, string, string, int) ([]SearchResult, time.Duration, error)
	Weight             int // current selection weight (reputation score)
	TotalRequests      int
	TotalTime          time.Duration // cumulative response time over all requests
	SuccessfulSearches int
	FailedSearches     int
}
// init seeds the global random number generator, which the weighted
// random engine selection below relies on.
func init() {
	rand.Seed(time.Now().UnixNano())
}
// selectSearchEngine picks an engine via weighted random selection.
// Weights are first refreshed from each engine's live metrics, so a
// fast, reliable engine is proportionally more likely to be chosen.
// The caller must pass a non-empty slice.
func selectSearchEngine(engines []SearchEngine) SearchEngine {
	searchEngineLock.Lock()
	defer searchEngineLock.Unlock()

	// Recalculate weights based on average response time and success rate.
	for i := range engines {
		engines[i].Weight = calculateReputation(engines[i])
	}

	totalWeight := 0
	for _, engine := range engines {
		totalWeight += engine.Weight
	}

	// rand.Intn panics for n <= 0; if every engine currently has a
	// zero (or negative) reputation, fall back to the first engine
	// instead of crashing.
	if totalWeight <= 0 {
		return engines[0]
	}

	randValue := rand.Intn(totalWeight)
	for _, engine := range engines {
		if randValue < engine.Weight {
			return engine
		}
		randValue -= engine.Weight
	}

	return engines[0] // fallback to the first engine
}
// updateEngineMetrics records the outcome of one search attempt and
// immediately recomputes the engine's Weight so the next selection
// sees fresh reputation data. Guarded by searchEngineLock for
// concurrent callers.
func updateEngineMetrics(engine *SearchEngine, responseTime time.Duration, success bool) {
	searchEngineLock.Lock()
	defer searchEngineLock.Unlock()

	engine.TotalRequests++
	engine.TotalTime += responseTime
	if success {
		engine.SuccessfulSearches++
	} else {
		engine.FailedSearches++
	}
	engine.Weight = calculateReputation(*engine)
}
// calculateReputation scores an engine from its average response time
// and success rate: faster-than-reference and more-successful engines
// earn higher scores. New engines get a neutral default weight.
func calculateReputation(engine SearchEngine) int {
	const referenceTime = time.Second // 1 second reference time

	if engine.TotalRequests == 0 {
		return 10 // Default weight for new engines
	}

	// Calculate average response time in seconds.
	avgResponseTime := engine.TotalTime.Seconds() / float64(engine.TotalRequests)

	// Guard against a zero average (possible with clock granularity or
	// instant failures): the original divided by it, producing +Inf —
	// and NaN when the success rate is also 0. Converting Inf/NaN to
	// int is undefined in Go. Treat "instant" as reference speed.
	if avgResponseTime <= 0 {
		avgResponseTime = referenceTime.Seconds()
	}

	// Calculate success rate.
	successRate := float64(engine.SuccessfulSearches) / float64(engine.TotalRequests)

	// Combine response time and success rate into a single reputation
	// score, scaled by 10 for a friendlier integer range.
	reputation := (referenceTime.Seconds() / avgResponseTime) * successRate
	return int(reputation * 10)
}

View file

@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{.Query}} - Ocásek</title>
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
</head>
<body>
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">

View file

@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{.Query}} - Ocásek</title>
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
</head>
<body>
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">

View file

@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{.Query}} - Ocásek</title>
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
</head>
<body>
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">

View file

@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{ .Query }} - Ocásek</title>
<link rel="stylesheet" href="/static/css/style.css">
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
<script src="https://cdn.jsdelivr.net/npm/leaflet@1.9.4/dist/leaflet.js"></script>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/leaflet@1.9.4/dist/leaflet.css" />
<style>

View file

@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Search with Ocásek</title>
<link rel="stylesheet" href="/static/css/style.css">
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
</head>
<body>
<div class="settings-search-div settings-search-div-search">

View file

@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Settings - Ocásek</title>
<link rel="stylesheet" type="text/css" href="static/css/style.css">
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
</head>
<body>
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
@ -47,7 +46,7 @@
</form>
<div class="results_settings">
<form>
<h1>SETTINGS ARE NOT IMPLEMENTED YET</h1>
<h1>Settings</h1>
<h2>Theme</h2>
<label for="theme-dark">Dark Theme:</label>
<input type="checkbox" class="results-settings" id="theme-dark" name="theme" value="dark"><br>

View file

@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{.Query}} - Ocásek</title>
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
</head>
<body>
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
@ -57,7 +56,7 @@
</select>
<button class="results-save" name="t" value="text">Apply settings</button>
</form>
<div class="results" id="results">
<div class="results">
{{if .Results}}
{{range .Results}}
<div class="result_item">
@ -67,13 +66,11 @@
</div>
<br>
{{end}}
{{else if .NoResults}}
<div class="no-results">No results found for '{{ .Query }}'. Try different keywords.</div>
{{else}}
<div class="no-more-results">Looks like this is the end of results.</div>
<div class="no-results">No results found for '{{ .Query }}'. Try different keywords.</div>
{{end}}
</div>
<div class="prev-next prev-img" id="prev-next">
</div>
<div class="prev-next prev-img">
<form action="/search" method="get">
<input type="hidden" name="q" value="{{ .Query }}">
<input type="hidden" name="t" value="text">
@ -86,48 +83,8 @@
</form>
</div>
<script>
document.addEventListener("DOMContentLoaded", function() {
if (document.getElementById('prev-next')) {
document.getElementById('prev-next').style.display = 'none';
let page = {{ .Page }};
const query = "{{ .Query }}";
let loading = false;
let hasMoreResults = true;
function loadResults(newPage) {
if (loading || !hasMoreResults) return;
loading = true;
fetch(`/search?q=${encodeURIComponent(query)}&t=text&p=${newPage}`)
.then(response => response.text())
.then(data => {
const parser = new DOMParser();
const doc = parser.parseFromString(data, 'text/html');
const newResults = doc.getElementById('results').innerHTML;
const noResultsMessage = "No results found for '{{ .Query }}'. Try different keywords.";
if (newResults.includes(noResultsMessage)) {
document.getElementById('results').innerHTML += "<div class='no-more-results'>Looks like this is the end of results.</div>";
hasMoreResults = false;
} else {
document.getElementById('results').innerHTML += newResults;
page = newPage;
}
loading = false;
})
.catch(error => {
console.error('Error loading results:', error);
loading = false;
});
}
window.addEventListener('scroll', () => {
if (window.innerHeight + window.scrollY >= document.body.offsetHeight) {
loadResults(page + 1);
}
});
}
});
// Check if JavaScript is enabled and modify the DOM accordingly
document.getElementById('content').classList.remove('js-enabled');
</script>
</body>
</html>

View file

@ -5,16 +5,15 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{.Query}} - Ocásek</title>
<link rel="stylesheet" href="/static/css/style.css">
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
</head>
<body>
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
<h1 class="logomobile"><a class="no-decoration" href="./">Ocásek</a></h1>
<div class="wrapper-results">
<input type="text" name="q" value="{{ .Query }}" id="search-input" placeholder="Type to search..." />
<button id="search-wrapper-ico" class="material-icons-round" name="t" value="video">search</button>
<input type="submit" class="hide" name="t" value="video" />
</div>
<input type="text" name="q" value="{{ .Query }}" id="search-input" placeholder="Type to search..." />
<button id="search-wrapper-ico" class="material-icons-round" name="t" value="video">search</button>
<input type="submit" class="hide" name="t" value="video" />
</div>
<div class="sub-search-button-wrapper">
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="text">search</button>
@ -33,16 +32,17 @@
<button name="t" value="forum" class="clickable">Forums</button>
</div>
<div id="content" class="js-enabled">
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="map">map</button>
<button name="t" value="map" class="clickable">Maps</button>
</div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="map">map</button>
<button name="t" value="map" class="clickable">Maps</button>
</div>
</div>
<div class="search-container-results-btn">
<button id="sub-search-wrapper-ico" class="material-icons-round clickable" name="t" value="file">share</button>
<button name="t" value="file" class="clickable">Torrents</button>
</div>
</div>
</div>
</form>
<!-- Results go here -->
<p class="fetched fetched_dif fetched_vid"><!-- { fetched } --></p>
@ -51,11 +51,11 @@
<div>
<div class="video__results">
<div class="video__img__results">
<a href="{{ .Href }}"> <img src="{{ .Image }}">
<div class="duration">{{ .Duration }}</div>
</img></a>
<a href="{{ .Href }}"> <img src="{{ .Image }}">
<div class="duration">{{ .Duration }}</div>
</img></a>
</div>
<div class="results video-results-margin">
<div class="results video-results-margin">
<h3 class="video_title" href="{{ .Href }}">{{ .Title }}</h3></a>
<p class="stats">{{ .Views }} <span class="pipe">|</span> {{ .Date }}</p>
<p class="publish__info">YouTube <span class="pipe">|</span> {{ .Creator }}</p>
@ -64,23 +64,12 @@
</div>
{{ end }}
{{ else }}
<div class="no-results">No results found for '{{ .Query }}'. Try different keywords.</div>
                <div class="no-results">No results found for '{{ .Query }}'. Try different keywords.</div>
{{ end }}
<div class="prev-next prev-img" id="prev-next">
<form action="/search" method="get">
<input type="hidden" name="q" value="{{ .Query }}">
<input type="hidden" name="t" value="video">
{{ if .HasPrevPage }}
<button type="submit" name="p" value="{{ sub .Page 1 }}">Previous</button>
{{ end }}
{{ if .HasNextPage }}
<button type="submit" name="p" value="{{ add .Page 1 }}">Next</button>
{{ end }}
</form>
</div>
<script>
// Check if JavaScript is enabled and modify the DOM accordingly
document.getElementById('content').classList.remove('js-enabled');
</script>
</body>
</html>
</html>

View file

@ -1,78 +0,0 @@
package main
import (
"fmt"
"io/ioutil"
"net/http"
"net/url"
"strings"
"time"
"github.com/PuerkitoBio/goquery"
)
// PerformBraveTextSearch performs a text search on Brave and returns the results.
// PerformBraveTextSearch performs a text search on Brave and returns the
// results together with the time the request took.
//
// NOTE(review): the safe and lang parameters are currently unused — they are
// accepted for signature parity with the other engines; confirm whether Brave
// supports equivalent query parameters before wiring them in.
func PerformBraveTextSearch(query, safe, lang string, offset int) ([]TextSearchResult, time.Duration, error) {
	startTime := time.Now() // Start the timer

	var results []TextSearchResult

	// Build the search URL.
	searchURL := fmt.Sprintf("https://search.brave.com/search?q=%s&offset=%d", url.QueryEscape(query), offset)

	req, err := http.NewRequest("GET", searchURL, nil)
	if err != nil {
		return nil, 0, fmt.Errorf("creating request: %v", err)
	}

	// Set headers including User-Agent.
	textUserAgent, err := GetUserAgent("Text-Search")
	if err != nil {
		return nil, 0, err
	}
	req.Header.Set("User-Agent", textUserAgent)

	// Bound the request so a stalled upstream cannot hang the caller.
	client := &http.Client{Timeout: 10 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return nil, 0, fmt.Errorf("performing request: %v", err)
	}
	defer resp.Body.Close()

	// Fail fast on non-success responses instead of scraping an error page.
	if resp.StatusCode != http.StatusOK {
		return nil, time.Since(startTime), fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}

	// Read the response body.
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, 0, fmt.Errorf("reading response body: %v", err)
	}

	// Parse the response body.
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(body)))
	if err != nil {
		return nil, 0, fmt.Errorf("parsing response body: %v", err)
	}

	// Extract search results from each result snippet.
	doc.Find(".snippet").Each(func(i int, s *goquery.Selection) {
		title := s.Find(".title").Text()
		description := s.Find(".snippet-description").Text()
		url, exists := s.Find("a").Attr("href")

		// Add to results only if all components are present.
		if title != "" && description != "" && exists && url != "" {
			results = append(results, TextSearchResult{
				Header:      title,
				URL:         url,
				Description: description,
			})
		}
	})

	duration := time.Since(startTime) // Calculate the duration

	// Return an error if no results are found.
	if len(results) == 0 {
		return nil, duration, fmt.Errorf("no results found")
	}

	return results, duration, nil
}

View file

@ -2,6 +2,7 @@ package main
import (
"fmt"
"log"
"net/http"
"net/url"
"strings"
@ -10,25 +11,73 @@ import (
"github.com/PuerkitoBio/goquery"
)
func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
startTime := time.Now() // Start the timer
const (
resultsPerPage = 10
)
var results []TextSearchResult
searchURL := buildDuckDuckGoSearchURL(query, page)
resp, err := http.Get(searchURL)
func getVQD(query string) (string, error) {
queryURL := fmt.Sprintf("https://duckduckgo.com/?q=%s", url.QueryEscape(query))
resp, err := http.Get(queryURL)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
return "", fmt.Errorf("failed to fetch vqd: %v", err)
}
defer resp.Body.Close()
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return "", fmt.Errorf("loading HTML document: %v", err)
}
var vqd string
doc.Find("script").Each(func(i int, s *goquery.Selection) {
text := s.Text()
if strings.Contains(text, "vqd=\"") {
start := strings.Index(text, "vqd=\"") + 5
end := strings.Index(text[start:], "\"")
vqd = text[start : start+end]
}
})
if vqd == "" {
return "", fmt.Errorf("vqd not found")
}
return vqd, nil
}
func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
var results []TextSearchResult
client := &http.Client{Timeout: 10 * time.Second}
vqd, err := getVQD(query)
if err != nil {
return nil, fmt.Errorf("failed to get vqd: %v", err)
}
searchURL := fmt.Sprintf("https://duckduckgo.com/html/?q=%s&kl=%s&safe=%s&s=%d&vqd=%s",
url.QueryEscape(query), lang, safe, (page-1)*resultsPerPage, vqd)
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
return nil, fmt.Errorf("failed to create request: %v", err)
}
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36")
resp, err := client.Do(req)
if err != nil {
return nil, fmt.Errorf("making request: %v", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
return nil, fmt.Errorf("loading HTML document: %v", err)
}
doc.Find(".result__body").Each(func(i int, s *goquery.Selection) {
@ -45,22 +94,34 @@ func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSear
URL: uddg,
Header: strings.TrimSpace(header),
Description: strings.TrimSpace(description),
Source: "DuckDuckGo",
}
results = append(results, result)
if debugMode {
log.Printf("Processed DuckDuckGo result: %+v\n", result)
}
} else {
if debugMode {
log.Printf("Missing 'uddg' parameter in URL: %s\n", rawURL)
}
}
} else {
if debugMode {
log.Printf("Error parsing URL: %s, error: %v\n", rawURL, err)
}
}
} else {
if debugMode {
log.Printf("Missing 'href' attribute in result anchor tag\n")
}
}
})
duration := time.Since(startTime) // Calculate the duration
return results, duration, nil
}
func buildDuckDuckGoSearchURL(query string, page int) string {
startParam := ""
if page > 1 {
startParam = fmt.Sprintf("&s=%d", (page-1)*10)
if len(results) == 0 {
if debugMode {
log.Println("No results found from DuckDuckGo")
}
}
return fmt.Sprintf("https://duckduckgo.com/html/?q=%s%s", url.QueryEscape(query), startParam)
return results, nil
}

View file

@ -6,59 +6,59 @@ import (
"net/http"
"net/url"
"strings"
"time"
"github.com/PuerkitoBio/goquery"
)
func PerformGoogleTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
func PerformGoogleTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
const resultsPerPage = 10
var results []TextSearchResult
startTime := time.Now() // Start the timer
client := &http.Client{}
searchURL := buildSearchURL(query, safe, lang, page, resultsPerPage)
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
return nil, 0, fmt.Errorf("failed to create request: %v", err)
return nil, fmt.Errorf("failed to create request: %v", err)
}
// User Agent generation
TextUserAgent, err := GetUserAgent("Text-Search")
if err != nil {
return nil, 0, err
fmt.Println("Error:", err)
return nil, err
}
if debugMode {
fmt.Println("Generated User Agent (text):", TextUserAgent)
}
req.Header.Set("User-Agent", TextUserAgent)
resp, err := client.Do(req)
if err != nil {
return nil, 0, fmt.Errorf("making request: %v", err)
return nil, fmt.Errorf("making request: %v", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
return nil, fmt.Errorf("loading HTML document: %v", err)
}
results = parseResults(doc)
duration := time.Since(startTime) // Calculate the duration
if len(results) == 0 {
if debugMode {
log.Println("No results found from Google")
}
}
return results, duration, nil
return results, nil
}
func buildSearchURL(query, safe, lang string, page, resultsPerPage int) string {
@ -104,6 +104,9 @@ func parseResults(doc *goquery.Document) []TextSearchResult {
Description: description,
}
results = append(results, result)
if debugMode {
log.Printf("Google result: %+v\n", result)
}
})
return results

View file

@ -6,7 +6,6 @@ import (
"log"
"net/http"
"net/url"
"time"
)
const LIBREX_DOMAIN = "librex.antopie.org"
@ -19,41 +18,40 @@ type LibreXResult struct {
type LibreXResponse []LibreXResult
func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
startTime := time.Now() // Start the timer
// LibreX/Y uses offset instead of page that starts at 0
page--
page = page * 10
searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page)
func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
// LibreX uses page starting from 0
searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page-1)
// User Agent generation
userAgent, err := GetUserAgent("librex-text-search")
if err != nil {
return nil, 0, err
return nil, err
}
if debugMode {
log.Println("Generated User Agent (text):", userAgent)
}
req, err := http.NewRequest("GET", searchURL, nil)
if err != nil {
return nil, 0, err
return nil, err
}
req.Header.Set("User-Agent", userAgent)
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return nil, 0, logError("error making request to LibreX", err)
return nil, logError("error making request to LibreX", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, 0, logError("unexpected status code", fmt.Errorf("%d", resp.StatusCode))
return nil, logError("unexpected status code", fmt.Errorf("%d", resp.StatusCode))
}
var librexResp LibreXResponse
if err := json.NewDecoder(resp.Body).Decode(&librexResp); err != nil {
return nil, 0, logError("error decoding LibreX response", err)
return nil, logError("error decoding LibreX response", err)
}
var results []TextSearchResult
@ -65,19 +63,16 @@ func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchRe
Source: "LibreX",
}
if debugMode {
log.Printf("LibreX result: %+v\n", result)
}
results = append(results, result)
}
duration := time.Since(startTime) // Calculate the duration
if len(results) == 0 {
return nil, duration, fmt.Errorf("no results found")
}
return results, duration, nil
return results, nil
}
// NOTE: logging here and also returning the wrapped error will probably print the error twice.
func logError(message string, err error) error {
log.Printf("%s: %v", message, err)
return fmt.Errorf("%s: %w", message, err)

View file

@ -3,6 +3,7 @@ package main
import (
"encoding/json"
"fmt"
"log"
"net/http"
"net/url"
"time"
@ -26,9 +27,11 @@ type QwantTextAPIResponse struct {
}
// PerformQwantTextSearch contacts the Qwant API and returns a slice of TextSearchResult
func PerformQwantTextSearch(query, safe, lang string) ([]TextSearchResult, error) {
func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
const resultsPerPage = 10
const offset = 0
// Calculate the offset based on the page number
offset := (page - 1) * resultsPerPage
// Ensure safe search is disabled by default if not specified
if safe == "" {
@ -40,11 +43,12 @@ func PerformQwantTextSearch(query, safe, lang string) ([]TextSearchResult, error
lang = "en_CA"
}
apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/web?q=%s&count=%d&locale=%s&offset=%d&device=desktop",
apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/web?q=%s&count=%d&locale=%s&offset=%d&device=desktop&safesearch=%s",
url.QueryEscape(query),
resultsPerPage,
lang,
offset)
offset,
safe)
client := &http.Client{Timeout: 10 * time.Second}
@ -93,6 +97,9 @@ func PerformQwantTextSearch(query, safe, lang string) ([]TextSearchResult, error
func cleanQwantURL(rawURL string) string {
u, err := url.Parse(rawURL)
if err != nil {
if debugMode {
log.Printf("Error parsing URL: %v", err)
}
return rawURL
}
return u.Scheme + "://" + u.Host + u.Path

183
text.go
View file

@ -1,44 +1,44 @@
package main
import (
"flag"
"fmt"
"html/template"
"log"
"net/http"
"sort"
"sync"
"time"
)
var textSearchEngines []SearchEngine
var (
debugMode bool
)
func init() {
textSearchEngines = []SearchEngine{
{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch), Weight: 1},
{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch), Weight: 2},
{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch), Weight: 2},
{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch), Weight: 5}, // DuckDuckGo timeouts too fast and search results are trash
// {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
}
flag.BoolVar(&debugMode, "debug", false, "enable debug mode")
flag.Parse()
}
func HandleTextSearch(w http.ResponseWriter, query, safe, lang string, page int) {
startTime := time.Now()
const resultsPerPage = 10
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, safe, lang, page)
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, safe, lang, page, resultsPerPage)
hasPrevPage := page > 1
hasNextPage := len(combinedResults) > 0
hasNextPage := len(combinedResults) == resultsPerPage
displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
// Prefetch next and previous pages
go prefetchPage(query, safe, lang, page+1)
if hasPrevPage {
go prefetchPage(query, safe, lang, page-1)
// Always check and cache the next page if not enough results
if hasNextPage {
go cacheNextPageIfNotCached(query, safe, lang, page+1, resultsPerPage)
}
}
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TextSearchResult {
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page, resultsPerPage int) []TextSearchResult {
cacheChan := make(chan []SearchResult)
var combinedResults []TextSearchResult
@ -56,87 +56,136 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
select {
case results := <-cacheChan:
if results == nil {
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
combinedResults = fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
} else {
textResults, _, _ := convertToSpecificResults(results)
combinedResults = textResults
}
case <-time.After(2 * time.Second):
log.Println("Cache check timeout")
combinedResults = fetchTextResults(query, safe, lang, page)
if len(combinedResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
combinedResults = fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
}
return combinedResults
}
func prefetchPage(query, safe, lang string, page int) {
func cacheNextPageIfNotCached(query, safe, lang string, page, resultsPerPage int) {
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
if _, exists := resultsCache.Get(cacheKey); !exists {
log.Printf("Page %d not cached, caching now...", page)
pageResults := fetchTextResults(query, safe, lang, page)
if len(pageResults) > 0 {
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
}
log.Printf("Next page %d not cached, caching now...", page)
nextPageResults := fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
resultsCache.Set(cacheKey, convertToSearchResults(nextPageResults))
} else {
log.Printf("Page %d already cached", page)
log.Printf("Next page %d already cached", page)
}
}
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
var results []TextSearchResult
func fetchTextResultsUntilFull(query, safe, lang string, targetPage, resultsPerPage int) []TextSearchResult {
var combinedResults []TextSearchResult
currentPage := 1
resultsNeeded := targetPage * resultsPerPage
for _, engine := range textSearchEngines {
log.Printf("Using search engine: %s", engine.Name)
searchResults, duration, err := engine.Func(query, safe, lang, page)
updateEngineMetrics(&engine, duration, err == nil)
if err != nil {
log.Printf("Error performing search with %s: %v", engine.Name, err)
continue
for len(combinedResults) < resultsNeeded {
cacheKey := CacheKey{Query: query, Page: targetPage, Safe: safe == "true", Lang: lang, Type: "text"}
cachedResults, exists := resultsCache.Get(cacheKey)
if exists {
textResults, _, _ := convertToSpecificResults(cachedResults)
combinedResults = append(combinedResults, textResults...)
} else {
results := fetchAndCacheTextResults(query, safe, lang, currentPage, resultsPerPage)
if len(results) == 0 {
break
}
combinedResults = append(combinedResults, results...)
resultsCache.Set(cacheKey, convertToSearchResults(results))
}
results = append(results, validateResults(searchResults)...)
currentPage++
// If results are found, break out of the loop
if len(results) > 0 {
// Stop fetching if we have enough results for the target page and the next page
if len(combinedResults) >= resultsNeeded+resultsPerPage {
break
}
}
return results
}
startIndex := (targetPage - 1) * resultsPerPage
endIndex := startIndex + resultsPerPage
func validateResults(searchResults []SearchResult) []TextSearchResult {
var validResults []TextSearchResult
// Remove anything that is missing a URL or Header
for _, result := range searchResults {
textResult := result.(TextSearchResult)
if textResult.URL != "" || textResult.Header != "" {
validResults = append(validResults, textResult)
}
if startIndex >= len(combinedResults) {
return []TextSearchResult{}
}
if endIndex > len(combinedResults) {
endIndex = len(combinedResults)
}
return validResults
return combinedResults[startIndex:endIndex]
}
func wrapTextSearchFunc(f func(string, string, string, int) ([]TextSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
textResults, duration, err := f(query, safe, lang, page)
if err != nil {
return nil, duration, err
}
searchResults := make([]SearchResult, len(textResults))
for i, result := range textResults {
searchResults[i] = result
}
return searchResults, duration, nil
func fetchAndCacheTextResults(query, safe, lang string, page, resultsPerPage int) []TextSearchResult {
var combinedResults []TextSearchResult
var wg sync.WaitGroup
var mu sync.Mutex
resultsChan := make(chan []TextSearchResult)
searchFuncs := []struct {
Func func(string, string, string, int) ([]TextSearchResult, error)
Source string
}{
{PerformGoogleTextSearch, "Google"},
{PerformLibreXTextSearch, "LibreX"},
// {PerformSearXNGTextSearch, "SearXNG"},
}
wg.Add(len(searchFuncs))
for _, searchFunc := range searchFuncs {
go func(searchFunc func(string, string, string, int) ([]TextSearchResult, error), source string) {
defer wg.Done()
results, err := searchFunc(query, safe, lang, page)
if err == nil {
for i := range results {
results[i].Source = source
}
resultsChan <- results
} else {
log.Printf("Error performing search from %s: %v", source, err)
}
}(searchFunc.Func, searchFunc.Source)
}
go func() {
wg.Wait()
close(resultsChan)
}()
for results := range resultsChan {
mu.Lock()
combinedResults = append(combinedResults, results...)
mu.Unlock()
}
sort.SliceStable(combinedResults, func(i, j int) bool {
return sourceOrder(combinedResults[i].Source) < sourceOrder(combinedResults[j].Source)
})
log.Printf("Fetched %d results for page %d", len(combinedResults), page)
return combinedResults
}
func sourceOrder(source string) int {
switch source {
case "Google":
return 1
case "LibreX":
return 2
case "SearchXNG":
return 3
default:
return 4
}
}
@ -167,7 +216,6 @@ func displayResults(w http.ResponseWriter, results []TextSearchResult, query, la
HasNextPage bool
LanguageOptions []LanguageOption
CurrentLang string
NoResults bool
}{
Results: results,
Query: query,
@ -177,7 +225,6 @@ func displayResults(w http.ResponseWriter, results []TextSearchResult, query, la
HasNextPage: hasNextPage,
LanguageOptions: languageOptions,
CurrentLang: lang,
NoResults: len(results) == 0,
}
err = tmpl.Execute(w, data)

View file

@ -180,23 +180,16 @@ func handleVideoSearch(w http.ResponseWriter, query, safe, lang string, page int
}
elapsed := time.Since(start)
tmpl, err := template.New("videos.html").Funcs(funcs).ParseFiles("templates/videos.html")
tmpl, err := template.ParseFiles("templates/videos.html")
if err != nil {
log.Printf("Error parsing template: %v", err)
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
return
}
err = tmpl.Execute(w, map[string]interface{}{
tmpl.Execute(w, map[string]interface{}{
"Results": results,
"Query": query,
"Fetched": fmt.Sprintf("%.2f seconds", elapsed.Seconds()),
"Page": page,
"HasPrevPage": page > 1,
"HasNextPage": len(results) > 0, // assuming you have a way to determine if there are more pages
})
if err != nil {
log.Printf("Error executing template: %v", err)
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
}
}