commit
d0c20bdc8f
30 changed files with 1024 additions and 411 deletions
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
config.json
|
||||||
|
opensearch.xml
|
26
README.md
26
README.md
|
@ -20,15 +20,28 @@
|
||||||
- [ ] Better name
|
- [ ] Better name
|
||||||
- [ ] LXC container
|
- [ ] LXC container
|
||||||
- [ ] Docker container
|
- [ ] Docker container
|
||||||
|
- [ ] Automatic updates
|
||||||
|
- [ ] Scalable crawlers and webservers + load balancing
|
||||||
|
|
||||||
# Go Search Engine
|
# Ocásek (Warp) Search Engine
|
||||||
|
|
||||||
A self-hosted [metasearch engine](https://en.wikipedia.org/wiki/Metasearch_engine) that respects privacy, contains no ads, and serves as a proxy/alternative to Google website.
|
A self-hosted, private and anonymous [metasearch engine](https://en.wikipedia.org/wiki/Metasearch_engine) that aims to be more resource-efficient and scalable. Decentralized services are nice, but jumping between instances when one just stops working for some reason is inconvenient. That's why this engine can do both: you can self-host it or use the [official instance](https://search.spitfirebrowser.com/).
|
||||||
|
|
||||||
|
## Comparison to other search engines
|
||||||
|
|
||||||
|
| Name | Works without JS | Privacy frontend redirect | Torrent results | API | No 3rd party libs | Scalable | Not Resource Hungry | Dynamic Page Loading |
|
||||||
|
|------------|----------------------|---------------------------|-----------------|-----|-------------------|----------|---------------------------------------------|----------------------|
|
||||||
|
| Whoogle | ✅ | ❓ Only host can set it | ❌ | ❌ | ❌ | ❌ | ❓ Moderate | ❓ Not specified |
|
||||||
|
| Araa-Search| ✅ | ✅ | ✅ | ✅ | ❓ | ❌ | ❌ Very resource hungry | ❌ |
|
||||||
|
| LibreY | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | ❓ Moderate | ❌ |
|
||||||
|
| Ocásek | ✅ | ✅ | ✅ | ❌ | ✅ [1] | ✅ | ✅ about 20MiB at idle, 21MiB when searching| ✅ |
|
||||||
|
|
||||||
|
[1]: It does not rely on 3rd-party libs for web scraping like [Selenium](https://www.javatpoint.com/selenium-webdriver), but it uses other search instances like LibreX as a fallback.
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
- Text search using Google search results.
|
- Text search using Google, Brave, DuckDuckGo and LibreX/Y search results.
|
||||||
- Image search using the Qwant API.
|
- Image search using the Qwant/Imgur.
|
||||||
- Video search using Piped API.
|
- Video search using Piped API.
|
||||||
- Image viewing using proxy and direct links to image source pages for image searches.
|
- Image viewing using proxy and direct links to image source pages for image searches.
|
||||||
- Maps using OpenStreetMap
|
- Maps using OpenStreetMap
|
||||||
|
@ -39,7 +52,8 @@ A self-hosted [metasearch engine](https://en.wikipedia.org/wiki/Metasearch_engin
|
||||||
### Prerequisites
|
### Prerequisites
|
||||||
|
|
||||||
- Go (version 1.18 or higher recommended)
|
- Go (version 1.18 or higher recommended)
|
||||||
- Access to the internet for fetching results from the Qwant API and Google
|
- Git (unexpected)
|
||||||
|
- Access to the internet for fetching results (even more unexpected)
|
||||||
|
|
||||||
### Running the Application
|
### Running the Application
|
||||||
|
|
||||||
|
@ -49,3 +63,5 @@ cd Search
|
||||||
chmod +x ./run.sh
|
chmod +x ./run.sh
|
||||||
./run.sh
|
./run.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
|
*It's that easy!*
|
54
cache.go
54
cache.go
|
@ -1,13 +1,18 @@
|
||||||
// common_cache.go
|
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"log"
|
||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/shirou/gopsutil/mem"
|
||||||
)
|
)
|
||||||
|
|
||||||
var resultsCache = NewResultsCache(6 * time.Hour) // Cache with 6-hour expiration
|
var (
|
||||||
|
resultsCache = NewResultsCache(6 * time.Hour) // Cache with 6-hour expiration
|
||||||
|
maxMemoryUsage = 90.0 // Maximum memory usage in %
|
||||||
|
)
|
||||||
|
|
||||||
// SearchResult is a generic interface for all types of search results.
|
// SearchResult is a generic interface for all types of search results.
|
||||||
type SearchResult interface{}
|
type SearchResult interface{}
|
||||||
|
@ -114,10 +119,14 @@ func (rc *ResultsCache) Get(key CacheKey) ([]SearchResult, bool) {
|
||||||
func (rc *ResultsCache) Set(key CacheKey, results []SearchResult) {
|
func (rc *ResultsCache) Set(key CacheKey, results []SearchResult) {
|
||||||
rc.mu.Lock()
|
rc.mu.Lock()
|
||||||
defer rc.mu.Unlock()
|
defer rc.mu.Unlock()
|
||||||
|
|
||||||
|
if _, exists := rc.results[rc.keyToString(key)]; !exists {
|
||||||
rc.results[rc.keyToString(key)] = CachedItem{
|
rc.results[rc.keyToString(key)] = CachedItem{
|
||||||
Results: results,
|
Results: results,
|
||||||
StoredTime: time.Now(),
|
StoredTime: time.Now(),
|
||||||
}
|
}
|
||||||
|
go rc.checkAndCleanCache()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// keyToString converts a CacheKey to a string representation.
|
// keyToString converts a CacheKey to a string representation.
|
||||||
|
@ -125,7 +134,46 @@ func (rc *ResultsCache) keyToString(key CacheKey) string {
|
||||||
return fmt.Sprintf("%s|%d|%t|%s|%s", key.Query, key.Page, key.Safe, key.Lang, key.Type)
|
return fmt.Sprintf("%s|%d|%t|%s|%s", key.Query, key.Page, key.Safe, key.Lang, key.Type)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Helper functions to convert between generic SearchResult and specific ImageSearchResult
|
func (rc *ResultsCache) checkAndCleanCache() {
|
||||||
|
if rc.memoryUsage() > maxMemoryUsage {
|
||||||
|
rc.cleanOldestItems()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rc *ResultsCache) memoryUsage() float64 {
|
||||||
|
v, err := mem.VirtualMemory()
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Failed to get memory info: %v", err)
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
return v.UsedPercent
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rc *ResultsCache) cleanOldestItems() {
|
||||||
|
rc.mu.Lock()
|
||||||
|
defer rc.mu.Unlock()
|
||||||
|
|
||||||
|
for rc.memoryUsage() > maxMemoryUsage {
|
||||||
|
var oldestKey string
|
||||||
|
var oldestTime time.Time = time.Now()
|
||||||
|
|
||||||
|
for key, item := range rc.results {
|
||||||
|
if item.StoredTime.Before(oldestTime) {
|
||||||
|
oldestTime = item.StoredTime
|
||||||
|
oldestKey = key
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if oldestKey != "" {
|
||||||
|
delete(rc.results, oldestKey)
|
||||||
|
log.Printf("Removed oldest cache item: %s", oldestKey)
|
||||||
|
} else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func convertToSearchResults(results interface{}) []SearchResult {
|
func convertToSearchResults(results interface{}) []SearchResult {
|
||||||
switch res := results.(type) {
|
switch res := results.(type) {
|
||||||
case []TextSearchResult:
|
case []TextSearchResult:
|
||||||
|
|
17
common.go
Normal file
17
common.go
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"html/template"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
debugMode bool = true
|
||||||
|
funcs = template.FuncMap{
|
||||||
|
"sub": func(a, b int) int {
|
||||||
|
return a - b
|
||||||
|
},
|
||||||
|
"add": func(a, b int) int {
|
||||||
|
return a + b
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
|
@ -128,10 +128,10 @@ func isInstanceValid(instance SearXInstance) bool {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
// func main() {
|
||||||
instance, err := getRandomSearXInstance()
|
// instance, err := getRandomSearXInstance()
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
log.Fatalf("Failed to get a SearX instance: %v", err)
|
// log.Fatalf("Failed to get a SearX instance: %v", err)
|
||||||
}
|
// }
|
||||||
fmt.Printf("Selected SearX instance: %s\n", instance.URL)
|
// fmt.Printf("Selected SearX instance: %s\n", instance.URL)
|
||||||
}
|
// }
|
||||||
|
|
16
go.mod
16
go.mod
|
@ -2,8 +2,22 @@ module searchengine
|
||||||
|
|
||||||
go 1.18
|
go 1.18
|
||||||
|
|
||||||
|
require github.com/PuerkitoBio/goquery v1.9.1 // direct
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/PuerkitoBio/goquery v1.9.1 // direct
|
|
||||||
github.com/andybalholm/cascadia v1.3.2 // indirect
|
github.com/andybalholm/cascadia v1.3.2 // indirect
|
||||||
|
github.com/chromedp/cdproto v0.0.0-20240202021202-6d0b6a386732 // indirect
|
||||||
|
github.com/chromedp/chromedp v0.9.5 // indirect
|
||||||
|
github.com/chromedp/sysutil v1.0.0 // indirect
|
||||||
|
github.com/gobwas/httphead v0.1.0 // indirect
|
||||||
|
github.com/gobwas/pool v0.2.1 // indirect
|
||||||
|
github.com/gobwas/ws v1.3.2 // indirect
|
||||||
|
github.com/josharian/intern v1.0.0 // indirect
|
||||||
|
github.com/mailru/easyjson v0.7.7 // indirect
|
||||||
|
github.com/shirou/gopsutil v3.21.11+incompatible
|
||||||
golang.org/x/net v0.21.0 // indirect
|
golang.org/x/net v0.21.0 // indirect
|
||||||
|
golang.org/x/sys v0.17.0 // indirect
|
||||||
|
golang.org/x/time v0.5.0 // indirect
|
||||||
|
github.com/go-ole/go-ole v1.2.6 // indirect
|
||||||
|
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
||||||
)
|
)
|
31
go.sum
31
go.sum
|
@ -2,7 +2,31 @@ github.com/PuerkitoBio/goquery v1.9.1 h1:mTL6XjbJTZdpfL+Gwl5U2h1l9yEkJjhmlTeV9VP
|
||||||
github.com/PuerkitoBio/goquery v1.9.1/go.mod h1:cW1n6TmIMDoORQU5IU/P1T3tGFunOeXEpGP2WHRwkbY=
|
github.com/PuerkitoBio/goquery v1.9.1/go.mod h1:cW1n6TmIMDoORQU5IU/P1T3tGFunOeXEpGP2WHRwkbY=
|
||||||
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
||||||
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
||||||
|
github.com/chromedp/cdproto v0.0.0-20240202021202-6d0b6a386732 h1:XYUCaZrW8ckGWlCRJKCSoh/iFwlpX316a8yY9IFEzv8=
|
||||||
|
github.com/chromedp/cdproto v0.0.0-20240202021202-6d0b6a386732/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs=
|
||||||
|
github.com/chromedp/chromedp v0.9.5 h1:viASzruPJOiThk7c5bueOUY91jGLJVximoEMGoH93rg=
|
||||||
|
github.com/chromedp/chromedp v0.9.5/go.mod h1:D4I2qONslauw/C7INoCir1BJkSwBYMyZgx8X276z3+Y=
|
||||||
|
github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic=
|
||||||
|
github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww=
|
||||||
|
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
|
||||||
|
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||||
|
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
|
||||||
|
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
|
||||||
|
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
|
||||||
|
github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
|
||||||
|
github.com/gobwas/ws v1.3.2 h1:zlnbNHxumkRvfPWgfXu8RBwyNR1x8wh9cf5PTOCqs9Q=
|
||||||
|
github.com/gobwas/ws v1.3.2/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY=
|
||||||
|
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||||
|
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||||
|
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
|
||||||
|
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
||||||
|
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||||
|
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
||||||
|
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
|
||||||
|
github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
|
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
|
||||||
|
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
|
@ -18,12 +42,17 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ
|
||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
|
||||||
|
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
|
@ -33,6 +62,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
|
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
|
||||||
|
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
|
|
149
images-imgur.go
Normal file
149
images-imgur.go
Normal file
|
@ -0,0 +1,149 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/PuerkitoBio/goquery"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PerformImgurImageSearch performs an image search on Imgur and returns the results
|
||||||
|
func PerformImgurImageSearch(query, safe, lang string, page int) ([]ImageSearchResult, time.Duration, error) {
|
||||||
|
startTime := time.Now() // Start the timer
|
||||||
|
|
||||||
|
var results []ImageSearchResult
|
||||||
|
searchURL := buildImgurSearchURL(query, page)
|
||||||
|
|
||||||
|
resp, err := http.Get(searchURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("making request: %v", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
doc.Find("div.cards div.post").Each(func(i int, s *goquery.Selection) {
|
||||||
|
thumbnailSrc, exists := s.Find("a img").Attr("src")
|
||||||
|
if !exists || len(thumbnailSrc) < 25 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
imgSrc := strings.Replace(thumbnailSrc, "b.", ".", 1)
|
||||||
|
|
||||||
|
// Ensure the URLs have the correct protocol
|
||||||
|
if !strings.HasPrefix(thumbnailSrc, "http") {
|
||||||
|
thumbnailSrc = "https:" + thumbnailSrc
|
||||||
|
}
|
||||||
|
if !strings.HasPrefix(imgSrc, "http") {
|
||||||
|
imgSrc = "https:" + imgSrc
|
||||||
|
}
|
||||||
|
|
||||||
|
urlPath, exists := s.Find("a").Attr("href")
|
||||||
|
if !exists {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scrape the image directly from the Imgur page
|
||||||
|
imgSrc = scrapeImageFromImgurPage("https://imgur.com" + urlPath)
|
||||||
|
|
||||||
|
// Remove any query parameters from the URL
|
||||||
|
imgSrc = removeQueryParameters(imgSrc)
|
||||||
|
|
||||||
|
title, _ := s.Find("a img").Attr("alt")
|
||||||
|
|
||||||
|
width, _ := strconv.Atoi(s.Find("a img").AttrOr("width", "0"))
|
||||||
|
height, _ := strconv.Atoi(s.Find("a img").AttrOr("height", "0"))
|
||||||
|
|
||||||
|
results = append(results, ImageSearchResult{
|
||||||
|
Thumbnail: thumbnailSrc,
|
||||||
|
Title: strings.TrimSpace(title),
|
||||||
|
Media: imgSrc,
|
||||||
|
Width: width,
|
||||||
|
Height: height,
|
||||||
|
Source: "https://imgur.com" + urlPath,
|
||||||
|
ThumbProxy: imgSrc, //"/img_proxy?url=" + url.QueryEscape(imgSrc)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
duration := time.Since(startTime) // Calculate the duration
|
||||||
|
|
||||||
|
return results, duration, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// scrapeImageFromImgurPage scrapes the image source from the Imgur page
|
||||||
|
func scrapeImageFromImgurPage(pageURL string) string {
|
||||||
|
resp, err := http.Get(pageURL)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error fetching page: %v\n", err)
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
fmt.Printf("Unexpected status code: %d\n", resp.StatusCode)
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error loading HTML document: %v\n", err)
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
imgSrc, exists := doc.Find("meta[property='og:image']").Attr("content")
|
||||||
|
if !exists {
|
||||||
|
fmt.Printf("Image not found on page: %s\n", pageURL)
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure the URL has the correct protocol
|
||||||
|
if !strings.HasPrefix(imgSrc, "http") {
|
||||||
|
imgSrc = "https:" + imgSrc
|
||||||
|
}
|
||||||
|
|
||||||
|
return imgSrc
|
||||||
|
}
|
||||||
|
|
||||||
|
// removeQueryParameters removes query parameters from a URL
|
||||||
|
func removeQueryParameters(rawURL string) string {
|
||||||
|
parsedURL, err := url.Parse(rawURL)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error parsing URL: %v\n", err)
|
||||||
|
return rawURL
|
||||||
|
}
|
||||||
|
parsedURL.RawQuery = ""
|
||||||
|
return parsedURL.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildImgurSearchURL(query string, page int) string {
|
||||||
|
baseURL := "https://imgur.com/search/score/all"
|
||||||
|
params := url.Values{}
|
||||||
|
params.Add("q", query)
|
||||||
|
params.Add("qs", "thumbs")
|
||||||
|
params.Add("p", fmt.Sprintf("%d", page-1))
|
||||||
|
return fmt.Sprintf("%s?%s", baseURL, params.Encode())
|
||||||
|
}
|
||||||
|
|
||||||
|
// func main() {
|
||||||
|
// results, duration, err := PerformImgurImageSearch("cats", "true", "en", 1)
|
||||||
|
// if err != nil {
|
||||||
|
// fmt.Println("Error:", err)
|
||||||
|
// return
|
||||||
|
// }
|
||||||
|
|
||||||
|
// fmt.Printf("Search took: %v\n", duration)
|
||||||
|
// for _, result := range results {
|
||||||
|
// fmt.Printf("Title: %s\nSource: %s\nMedia: %s\nThumbnail: %s\nThumbProxy: %s\nWidth: %d\nHeight: %d\n\n",
|
||||||
|
// result.Title, result.Source, result.Media, result.Thumbnail, result.ThumbProxy, result.Width, result.Height)
|
||||||
|
// }
|
||||||
|
// }
|
99
images-quant.go
Normal file
99
images-quant.go
Normal file
|
@ -0,0 +1,99 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// QwantAPIResponse represents the JSON response structure from Qwant API
|
||||||
|
type QwantAPIResponse struct {
|
||||||
|
Data struct {
|
||||||
|
Result struct {
|
||||||
|
Items []struct {
|
||||||
|
Media string `json:"media"`
|
||||||
|
Thumbnail string `json:"thumbnail"`
|
||||||
|
Title string `json:"title"`
|
||||||
|
Url string `json:"url"`
|
||||||
|
Width int `json:"width"`
|
||||||
|
Height int `json:"height"`
|
||||||
|
} `json:"items"`
|
||||||
|
} `json:"result"`
|
||||||
|
} `json:"data"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// PerformQwantImageSearch performs an image search on Qwant and returns the results.
|
||||||
|
func PerformQwantImageSearch(query, safe, lang string, page int) ([]ImageSearchResult, time.Duration, error) {
|
||||||
|
startTime := time.Now() // Start the timer
|
||||||
|
|
||||||
|
const resultsPerPage = 50
|
||||||
|
var offset int
|
||||||
|
if page <= 1 {
|
||||||
|
offset = 0
|
||||||
|
} else {
|
||||||
|
offset = (page - 1) * resultsPerPage
|
||||||
|
}
|
||||||
|
|
||||||
|
if safe == "" {
|
||||||
|
safe = "0"
|
||||||
|
}
|
||||||
|
|
||||||
|
if lang == "" {
|
||||||
|
lang = "en_CA"
|
||||||
|
}
|
||||||
|
|
||||||
|
apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/images?t=images&q=%s&count=%d&locale=%s&offset=%d&device=desktop&tgp=2&safesearch=%s",
|
||||||
|
url.QueryEscape(query),
|
||||||
|
resultsPerPage,
|
||||||
|
lang,
|
||||||
|
offset,
|
||||||
|
safe)
|
||||||
|
|
||||||
|
client := &http.Client{Timeout: 10 * time.Second}
|
||||||
|
|
||||||
|
req, err := http.NewRequest("GET", apiURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("creating request: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ImageUserAgent, err := GetUserAgent("Image-Search")
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
req.Header.Set("User-Agent", ImageUserAgent)
|
||||||
|
|
||||||
|
resp, err := client.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("making request: %v", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
var apiResp QwantAPIResponse
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("decoding response: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var results []ImageSearchResult
|
||||||
|
for _, item := range apiResp.Data.Result.Items {
|
||||||
|
results = append(results, ImageSearchResult{
|
||||||
|
Thumbnail: item.Thumbnail,
|
||||||
|
Title: item.Title,
|
||||||
|
Media: item.Media,
|
||||||
|
Source: item.Url,
|
||||||
|
ThumbProxy: "/img_proxy?url=" + url.QueryEscape(item.Media),
|
||||||
|
Width: item.Width,
|
||||||
|
Height: item.Height,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
duration := time.Since(startTime) // Calculate the duration
|
||||||
|
|
||||||
|
return results, duration, nil
|
||||||
|
}
|
169
images.go
169
images.go
|
@ -1,120 +1,22 @@
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"html/template"
|
"html/template"
|
||||||
"log"
|
"log"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
// QwantAPIResponse represents the JSON response structure from Qwant API
|
var imageSearchEngines []SearchEngine
|
||||||
type QwantAPIResponse struct {
|
|
||||||
Data struct {
|
func init() {
|
||||||
Result struct {
|
imageSearchEngines = []SearchEngine{
|
||||||
Items []struct {
|
{Name: "Qwant", Func: wrapImageSearchFunc(PerformQwantImageSearch), Weight: 1},
|
||||||
Media string `json:"media"`
|
{Name: "Imgur", Func: wrapImageSearchFunc(PerformImgurImageSearch), Weight: 2},
|
||||||
Thumbnail string `json:"thumbnail"`
|
}
|
||||||
Title string `json:"title"`
|
|
||||||
Url string `json:"url"`
|
|
||||||
Width int `json:"width"`
|
|
||||||
Height int `json:"height"`
|
|
||||||
} `json:"items"`
|
|
||||||
} `json:"result"`
|
|
||||||
} `json:"data"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var funcs = template.FuncMap{
|
|
||||||
"sub": func(a, b int) int {
|
|
||||||
return a - b
|
|
||||||
},
|
|
||||||
"add": func(a, b int) int {
|
|
||||||
return a + b
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
// FetchImageResults contacts the image search API and returns a slice of ImageSearchResult
|
|
||||||
func fetchImageResults(query string, safe, lang string, page int) ([]ImageSearchResult, error) {
|
|
||||||
const resultsPerPage = 50
|
|
||||||
var offset int
|
|
||||||
if page <= 1 {
|
|
||||||
offset = 0
|
|
||||||
} else {
|
|
||||||
offset = (page - 1) * resultsPerPage
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ensuring safe search is disabled by default if not specified
|
|
||||||
if safe == "" {
|
|
||||||
safe = "0"
|
|
||||||
}
|
|
||||||
|
|
||||||
// Defaulting to English Canada locale if not specified
|
|
||||||
if lang == "" {
|
|
||||||
lang = "en_CA"
|
|
||||||
}
|
|
||||||
|
|
||||||
// Format &lang=lang_de is incorrect, implement fix !
|
|
||||||
apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/images?t=images&q=%s&count=%d&locale=%s&offset=%d&device=desktop&tgp=2&safesearch=%s",
|
|
||||||
url.QueryEscape(query),
|
|
||||||
resultsPerPage,
|
|
||||||
lang,
|
|
||||||
offset,
|
|
||||||
safe)
|
|
||||||
|
|
||||||
client := &http.Client{Timeout: 10 * time.Second}
|
|
||||||
|
|
||||||
req, err := http.NewRequest("GET", apiURL, nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("creating request: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// User Agent generation
|
|
||||||
ImageUserAgent, err := GetUserAgent("Image-Search")
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error:", err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if debugMode {
|
|
||||||
fmt.Println("Generated User Agent (images):", ImageUserAgent)
|
|
||||||
}
|
|
||||||
|
|
||||||
req.Header.Set("User-Agent", ImageUserAgent)
|
|
||||||
|
|
||||||
resp, err := client.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("making request: %v", err)
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
if resp.StatusCode != http.StatusOK {
|
|
||||||
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
|
||||||
}
|
|
||||||
|
|
||||||
var apiResp QwantAPIResponse
|
|
||||||
if err := json.NewDecoder(resp.Body).Decode(&apiResp); err != nil {
|
|
||||||
return nil, fmt.Errorf("decoding response: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
var results []ImageSearchResult
|
|
||||||
for _, item := range apiResp.Data.Result.Items {
|
|
||||||
results = append(results, ImageSearchResult{
|
|
||||||
Thumbnail: item.Thumbnail, // Thumbnail URL
|
|
||||||
Title: item.Title, // Image title
|
|
||||||
Media: item.Media, // Direct link to the image
|
|
||||||
Source: item.Url,
|
|
||||||
ThumbProxy: "/img_proxy?url=" + url.QueryEscape(item.Media),
|
|
||||||
Width: item.Width,
|
|
||||||
Height: item.Height,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return results, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// HandleImageSearch is the HTTP handler for image search requests
|
|
||||||
func handleImageSearch(w http.ResponseWriter, query, safe, lang string, page int) {
|
func handleImageSearch(w http.ResponseWriter, query, safe, lang string, page int) {
|
||||||
startTime := time.Now()
|
startTime := time.Now()
|
||||||
|
|
||||||
|
@ -174,31 +76,66 @@ func getImageResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string
|
||||||
select {
|
select {
|
||||||
case results := <-cacheChan:
|
case results := <-cacheChan:
|
||||||
if results == nil {
|
if results == nil {
|
||||||
combinedResults = fetchAndCacheImageResults(query, safe, lang, page)
|
combinedResults = fetchImageResults(query, safe, lang, page)
|
||||||
|
if len(combinedResults) > 0 {
|
||||||
|
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
_, _, imageResults := convertToSpecificResults(results)
|
_, _, imageResults := convertToSpecificResults(results)
|
||||||
combinedResults = imageResults
|
combinedResults = imageResults
|
||||||
}
|
}
|
||||||
case <-time.After(2 * time.Second):
|
case <-time.After(2 * time.Second):
|
||||||
log.Println("Cache check timeout")
|
log.Println("Cache check timeout")
|
||||||
combinedResults = fetchAndCacheImageResults(query, safe, lang, page)
|
combinedResults = fetchImageResults(query, safe, lang, page)
|
||||||
|
if len(combinedResults) > 0 {
|
||||||
|
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return combinedResults
|
return combinedResults
|
||||||
}
|
}
|
||||||
|
|
||||||
func fetchAndCacheImageResults(query, safe, lang string, page int) []ImageSearchResult {
|
func fetchImageResults(query, safe, lang string, page int) []ImageSearchResult {
|
||||||
results, err := fetchImageResults(query, safe, lang, page)
|
var results []ImageSearchResult
|
||||||
if err != nil || len(results) == 0 {
|
|
||||||
log.Printf("Error fetching image results: %v", err)
|
for _, engine := range imageSearchEngines {
|
||||||
return []ImageSearchResult{
|
log.Printf("Using image search engine: %s", engine.Name)
|
||||||
{Title: "Results are currently unavailable, sorry. Please try again later."},
|
|
||||||
|
searchResults, duration, err := engine.Func(query, safe, lang, page)
|
||||||
|
updateEngineMetrics(&engine, duration, err == nil)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error performing image search with %s: %v", engine.Name, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, result := range searchResults {
|
||||||
|
results = append(results, result.(ImageSearchResult))
|
||||||
|
}
|
||||||
|
|
||||||
|
// If results are found, break out of the loop
|
||||||
|
if len(results) > 0 {
|
||||||
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Cache the valid results
|
// If no results found after trying all engines
|
||||||
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "image"}
|
if len(results) == 0 {
|
||||||
resultsCache.Set(cacheKey, convertToSearchResults(results))
|
log.Printf("No image results found for query: %s", query)
|
||||||
|
}
|
||||||
|
|
||||||
return results
|
return results
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func wrapImageSearchFunc(f func(string, string, string, int) ([]ImageSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
|
||||||
|
return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
|
||||||
|
imageResults, duration, err := f(query, safe, lang, page)
|
||||||
|
if err != nil {
|
||||||
|
return nil, duration, err
|
||||||
|
}
|
||||||
|
searchResults := make([]SearchResult, len(imageResults))
|
||||||
|
for i, result := range imageResults {
|
||||||
|
searchResults[i] = result
|
||||||
|
}
|
||||||
|
return searchResults, duration, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
116
init.go
Normal file
116
init.go
Normal file
|
@ -0,0 +1,116 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Configuration structure
|
||||||
|
type Config struct {
|
||||||
|
Port int
|
||||||
|
OpenSearch OpenSearchConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
type OpenSearchConfig struct {
|
||||||
|
Domain string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default configuration values
|
||||||
|
var defaultConfig = Config{
|
||||||
|
Port: 5000,
|
||||||
|
OpenSearch: OpenSearchConfig{
|
||||||
|
Domain: "localhost",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
const configFilePath = "config.json"
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Run the initialization process
|
||||||
|
err := initConfig()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Error during initialization:", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the main application
|
||||||
|
runServer()
|
||||||
|
}
|
||||||
|
|
||||||
|
func initConfig() error {
|
||||||
|
if _, err := os.Stat(configFilePath); os.IsNotExist(err) {
|
||||||
|
return createConfig()
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("Configuration file already exists.")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func createConfig() error {
|
||||||
|
reader := bufio.NewReader(os.Stdin)
|
||||||
|
|
||||||
|
fmt.Println("Configuration file not found.")
|
||||||
|
fmt.Print("Do you want to use default values? (yes/no): ")
|
||||||
|
useDefaults, _ := reader.ReadString('\n')
|
||||||
|
|
||||||
|
config := defaultConfig
|
||||||
|
if useDefaults != "yes\n" {
|
||||||
|
fmt.Print("Enter port (default 5000): ")
|
||||||
|
portStr, _ := reader.ReadString('\n')
|
||||||
|
if portStr != "\n" {
|
||||||
|
port, err := strconv.Atoi(portStr[:len(portStr)-1])
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
config.Port = port
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Print("Enter your domain address (e.g., domain.com): ")
|
||||||
|
domain, _ := reader.ReadString('\n')
|
||||||
|
if domain != "\n" {
|
||||||
|
config.OpenSearch.Domain = domain[:len(domain)-1]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
saveConfig(config)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func saveConfig(config Config) {
|
||||||
|
file, err := os.Create(configFilePath)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Error creating config file:", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
configData, err := json.MarshalIndent(config, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Error marshalling config data:", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = file.Write(configData)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Error writing to config file:", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadConfig() Config {
|
||||||
|
configFile, err := os.Open(configFilePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error opening config file: %v", err)
|
||||||
|
}
|
||||||
|
defer configFile.Close()
|
||||||
|
|
||||||
|
var config Config
|
||||||
|
if err := json.NewDecoder(configFile).Decode(&config); err != nil {
|
||||||
|
log.Fatalf("Error decoding config file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return config
|
||||||
|
}
|
34
main.go
34
main.go
|
@ -63,19 +63,6 @@ var languageOptions = []LanguageOption{
|
||||||
{Code: "lang_vi", Name: "Tiếng Việt (Vietnamese)"},
|
{Code: "lang_vi", Name: "Tiếng Việt (Vietnamese)"},
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
|
||||||
http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
|
|
||||||
http.HandleFunc("/", handleSearch)
|
|
||||||
http.HandleFunc("/search", handleSearch)
|
|
||||||
http.HandleFunc("/img_proxy", handleImageProxy)
|
|
||||||
http.HandleFunc("/settings", func(w http.ResponseWriter, r *http.Request) {
|
|
||||||
http.ServeFile(w, r, "templates/settings.html")
|
|
||||||
})
|
|
||||||
initializeTorrentSites()
|
|
||||||
fmt.Println("Server is listening on http://localhost:5000")
|
|
||||||
log.Fatal(http.ListenAndServe(":5000", nil))
|
|
||||||
}
|
|
||||||
|
|
||||||
func handleSearch(w http.ResponseWriter, r *http.Request) {
|
func handleSearch(w http.ResponseWriter, r *http.Request) {
|
||||||
query, safe, lang, searchType, page := parseSearchParams(r)
|
query, safe, lang, searchType, page := parseSearchParams(r)
|
||||||
|
|
||||||
|
@ -133,3 +120,24 @@ func parsePageParameter(pageStr string) int {
|
||||||
}
|
}
|
||||||
return page
|
return page
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func runServer() {
|
||||||
|
http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
|
||||||
|
http.HandleFunc("/", handleSearch)
|
||||||
|
http.HandleFunc("/search", handleSearch)
|
||||||
|
http.HandleFunc("/img_proxy", handleImageProxy)
|
||||||
|
http.HandleFunc("/settings", func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
http.ServeFile(w, r, "templates/settings.html")
|
||||||
|
})
|
||||||
|
http.HandleFunc("/opensearch.xml", func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Content-Type", "application/opensearchdescription+xml")
|
||||||
|
http.ServeFile(w, r, "static/opensearch.xml")
|
||||||
|
})
|
||||||
|
initializeTorrentSites()
|
||||||
|
|
||||||
|
config := loadConfig()
|
||||||
|
generateOpenSearchXML(config)
|
||||||
|
|
||||||
|
fmt.Printf("Server is listening on http://localhost:%d\n", config.Port)
|
||||||
|
log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", config.Port), nil))
|
||||||
|
}
|
||||||
|
|
50
open-search.go
Normal file
50
open-search.go
Normal file
|
@ -0,0 +1,50 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/xml"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
type OpenSearchDescription struct {
|
||||||
|
XMLName xml.Name `xml:"OpenSearchDescription"`
|
||||||
|
Xmlns string `xml:"xmlns,attr"`
|
||||||
|
ShortName string `xml:"ShortName"`
|
||||||
|
Description string `xml:"Description"`
|
||||||
|
Tags string `xml:"Tags"`
|
||||||
|
URL URL `xml:"Url"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type URL struct {
|
||||||
|
Type string `xml:"type,attr"`
|
||||||
|
Template string `xml:"template,attr"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateOpenSearchXML(config Config) {
|
||||||
|
opensearch := OpenSearchDescription{
|
||||||
|
Xmlns: "http://a9.com/-/spec/opensearch/1.1/",
|
||||||
|
ShortName: "Ocásek",
|
||||||
|
Description: "Search engine",
|
||||||
|
Tags: "search, engine",
|
||||||
|
URL: URL{
|
||||||
|
Type: "text/html",
|
||||||
|
Template: fmt.Sprintf("https://%s/search?q={searchTerms}", config.OpenSearch.Domain),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
file, err := os.Create("static/opensearch.xml")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Error creating OpenSearch file:", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
enc := xml.NewEncoder(file)
|
||||||
|
enc.Indent(" ", " ")
|
||||||
|
if err := enc.Encode(opensearch); err != nil {
|
||||||
|
fmt.Println("Error encoding OpenSearch XML:", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("OpenSearch description file generated successfully.")
|
||||||
|
}
|
8
run.sh
8
run.sh
|
@ -1,3 +1,7 @@
|
||||||
#!/bin/bash
|
#!/bin/sh
|
||||||
|
|
||||||
go run main.go images.go imageproxy.go video.go map.go text.go text-searchxng.go text-librex.go text-google.go cache.go forums.go files.go files-torrentgalaxy.go files-thepiratebay.go agent.go --debug
|
# Find all .go files in the current directory
|
||||||
|
GO_FILES=$(find . -name '*.go' -print)
|
||||||
|
|
||||||
|
# Run the Go program
|
||||||
|
go run $GO_FILES
|
||||||
|
|
90
search-engine.go
Normal file
90
search-engine.go
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"math/rand"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
searchEngineLock sync.Mutex
|
||||||
|
)
|
||||||
|
|
||||||
|
// SearchEngine struct now includes metrics for calculating reputation.
|
||||||
|
type SearchEngine struct {
|
||||||
|
Name string
|
||||||
|
Func func(string, string, string, int) ([]SearchResult, time.Duration, error)
|
||||||
|
Weight int
|
||||||
|
TotalRequests int
|
||||||
|
TotalTime time.Duration
|
||||||
|
SuccessfulSearches int
|
||||||
|
FailedSearches int
|
||||||
|
}
|
||||||
|
|
||||||
|
// init function seeds the random number generator.
|
||||||
|
func init() {
|
||||||
|
rand.Seed(time.Now().UnixNano())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Selects a search engine based on weighted random selection with dynamic weighting.
|
||||||
|
func selectSearchEngine(engines []SearchEngine) SearchEngine {
|
||||||
|
searchEngineLock.Lock()
|
||||||
|
defer searchEngineLock.Unlock()
|
||||||
|
|
||||||
|
// Recalculate weights based on average response time and success rate.
|
||||||
|
for i := range engines {
|
||||||
|
engines[i].Weight = calculateReputation(engines[i])
|
||||||
|
}
|
||||||
|
|
||||||
|
totalWeight := 0
|
||||||
|
for _, engine := range engines {
|
||||||
|
totalWeight += engine.Weight
|
||||||
|
}
|
||||||
|
|
||||||
|
randValue := rand.Intn(totalWeight)
|
||||||
|
for _, engine := range engines {
|
||||||
|
if randValue < engine.Weight {
|
||||||
|
return engine
|
||||||
|
}
|
||||||
|
randValue -= engine.Weight
|
||||||
|
}
|
||||||
|
|
||||||
|
return engines[0] // fallback to the first engine
|
||||||
|
}
|
||||||
|
|
||||||
|
// Updates the engine's performance metrics.
|
||||||
|
func updateEngineMetrics(engine *SearchEngine, responseTime time.Duration, success bool) {
|
||||||
|
searchEngineLock.Lock()
|
||||||
|
defer searchEngineLock.Unlock()
|
||||||
|
|
||||||
|
engine.TotalRequests++
|
||||||
|
engine.TotalTime += responseTime
|
||||||
|
if success {
|
||||||
|
engine.SuccessfulSearches++
|
||||||
|
} else {
|
||||||
|
engine.FailedSearches++
|
||||||
|
}
|
||||||
|
engine.Weight = calculateReputation(*engine)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculates the reputation of the search engine based on average response time and success rate.
|
||||||
|
func calculateReputation(engine SearchEngine) int {
|
||||||
|
const referenceTime = time.Second // 1 second reference time in nanoseconds (1000 ms)
|
||||||
|
|
||||||
|
if engine.TotalRequests == 0 {
|
||||||
|
return 10 // Default weight for new engines
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate average response time in seconds.
|
||||||
|
avgResponseTime := engine.TotalTime.Seconds() / float64(engine.TotalRequests)
|
||||||
|
|
||||||
|
// Calculate success rate.
|
||||||
|
successRate := float64(engine.SuccessfulSearches) / float64(engine.TotalRequests)
|
||||||
|
|
||||||
|
// Combine response time and success rate into a single reputation score.
|
||||||
|
// The formula can be adjusted to weigh response time and success rate differently.
|
||||||
|
reputation := (referenceTime.Seconds() / avgResponseTime) * successRate
|
||||||
|
|
||||||
|
// Scale reputation for better interpretability (e.g., multiply by 10)
|
||||||
|
return int(reputation * 10)
|
||||||
|
}
|
|
@ -5,6 +5,7 @@
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>{{.Query}} - Ocásek</title>
|
<title>{{.Query}} - Ocásek</title>
|
||||||
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
|
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
|
||||||
|
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>{{.Query}} - Ocásek</title>
|
<title>{{.Query}} - Ocásek</title>
|
||||||
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
|
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
|
||||||
|
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>{{.Query}} - Ocásek</title>
|
<title>{{.Query}} - Ocásek</title>
|
||||||
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
|
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
|
||||||
|
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>{{ .Query }} - Ocásek</title>
|
<title>{{ .Query }} - Ocásek</title>
|
||||||
<link rel="stylesheet" href="/static/css/style.css">
|
<link rel="stylesheet" href="/static/css/style.css">
|
||||||
|
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
|
||||||
<script src="https://cdn.jsdelivr.net/npm/leaflet@1.9.4/dist/leaflet.js"></script>
|
<script src="https://cdn.jsdelivr.net/npm/leaflet@1.9.4/dist/leaflet.js"></script>
|
||||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/leaflet@1.9.4/dist/leaflet.css" />
|
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/leaflet@1.9.4/dist/leaflet.css" />
|
||||||
<style>
|
<style>
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>Search with Ocásek</title>
|
<title>Search with Ocásek</title>
|
||||||
<link rel="stylesheet" href="/static/css/style.css">
|
<link rel="stylesheet" href="/static/css/style.css">
|
||||||
|
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<div class="settings-search-div settings-search-div-search">
|
<div class="settings-search-div settings-search-div-search">
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>Settings - Ocásek</title>
|
<title>Settings - Ocásek</title>
|
||||||
<link rel="stylesheet" type="text/css" href="static/css/style.css">
|
<link rel="stylesheet" type="text/css" href="static/css/style.css">
|
||||||
|
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
||||||
|
@ -46,7 +47,7 @@
|
||||||
</form>
|
</form>
|
||||||
<div class="results_settings">
|
<div class="results_settings">
|
||||||
<form>
|
<form>
|
||||||
<h1>Settings</h1>
|
<h1>SETTINGS ARE NOT IMPLEMENTED YET</h1>
|
||||||
<h2>Theme</h2>
|
<h2>Theme</h2>
|
||||||
<label for="theme-dark">Dark Theme:</label>
|
<label for="theme-dark">Dark Theme:</label>
|
||||||
<input type="checkbox" class="results-settings" id="theme-dark" name="theme" value="dark"><br>
|
<input type="checkbox" class="results-settings" id="theme-dark" name="theme" value="dark"><br>
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>{{.Query}} - Ocásek</title>
|
<title>{{.Query}} - Ocásek</title>
|
||||||
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
|
<link rel="stylesheet" type="text/css" href="/static/css/style.css">
|
||||||
|
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
||||||
|
@ -56,7 +57,7 @@
|
||||||
</select>
|
</select>
|
||||||
<button class="results-save" name="t" value="text">Apply settings</button>
|
<button class="results-save" name="t" value="text">Apply settings</button>
|
||||||
</form>
|
</form>
|
||||||
<div class="results">
|
<div class="results" id="results">
|
||||||
{{if .Results}}
|
{{if .Results}}
|
||||||
{{range .Results}}
|
{{range .Results}}
|
||||||
<div class="result_item">
|
<div class="result_item">
|
||||||
|
@ -66,11 +67,13 @@
|
||||||
</div>
|
</div>
|
||||||
<br>
|
<br>
|
||||||
{{end}}
|
{{end}}
|
||||||
{{else}}
|
{{else if .NoResults}}
|
||||||
<div class="no-results">No results found for '{{ .Query }}'. Try different keywords.</div>
|
<div class="no-results">No results found for '{{ .Query }}'. Try different keywords.</div>
|
||||||
|
{{else}}
|
||||||
|
<div class="no-more-results">Looks like this is the end of results.</div>
|
||||||
{{end}}
|
{{end}}
|
||||||
</div>
|
</div>
|
||||||
<div class="prev-next prev-img">
|
<div class="prev-next prev-img" id="prev-next">
|
||||||
<form action="/search" method="get">
|
<form action="/search" method="get">
|
||||||
<input type="hidden" name="q" value="{{ .Query }}">
|
<input type="hidden" name="q" value="{{ .Query }}">
|
||||||
<input type="hidden" name="t" value="text">
|
<input type="hidden" name="t" value="text">
|
||||||
|
@ -83,8 +86,48 @@
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
<script>
|
<script>
|
||||||
// Check if JavaScript is enabled and modify the DOM accordingly
|
document.addEventListener("DOMContentLoaded", function() {
|
||||||
document.getElementById('content').classList.remove('js-enabled');
|
if (document.getElementById('prev-next')) {
|
||||||
|
document.getElementById('prev-next').style.display = 'none';
|
||||||
|
|
||||||
|
let page = {{ .Page }};
|
||||||
|
const query = "{{ .Query }}";
|
||||||
|
let loading = false;
|
||||||
|
let hasMoreResults = true;
|
||||||
|
|
||||||
|
function loadResults(newPage) {
|
||||||
|
if (loading || !hasMoreResults) return;
|
||||||
|
loading = true;
|
||||||
|
fetch(`/search?q=${encodeURIComponent(query)}&t=text&p=${newPage}`)
|
||||||
|
.then(response => response.text())
|
||||||
|
.then(data => {
|
||||||
|
const parser = new DOMParser();
|
||||||
|
const doc = parser.parseFromString(data, 'text/html');
|
||||||
|
const newResults = doc.getElementById('results').innerHTML;
|
||||||
|
const noResultsMessage = "No results found for '{{ .Query }}'. Try different keywords.";
|
||||||
|
|
||||||
|
if (newResults.includes(noResultsMessage)) {
|
||||||
|
document.getElementById('results').innerHTML += "<div class='no-more-results'>Looks like this is the end of results.</div>";
|
||||||
|
hasMoreResults = false;
|
||||||
|
} else {
|
||||||
|
document.getElementById('results').innerHTML += newResults;
|
||||||
|
page = newPage;
|
||||||
|
}
|
||||||
|
loading = false;
|
||||||
|
})
|
||||||
|
.catch(error => {
|
||||||
|
console.error('Error loading results:', error);
|
||||||
|
loading = false;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
window.addEventListener('scroll', () => {
|
||||||
|
if (window.innerHeight + window.scrollY >= document.body.offsetHeight) {
|
||||||
|
loadResults(page + 1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
</script>
|
</script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>{{.Query}} - Ocásek</title>
|
<title>{{.Query}} - Ocásek</title>
|
||||||
<link rel="stylesheet" href="/static/css/style.css">
|
<link rel="stylesheet" href="/static/css/style.css">
|
||||||
|
<link rel="search" type="application/opensearchdescription+xml" title="Ocásek" href="/opensearch.xml">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
<form action="/search" id="prev-next-form" class="results-search-container" method="GET" autocomplete="off">
|
||||||
|
@ -42,7 +43,6 @@
|
||||||
<button name="t" value="file" class="clickable">Torrents</button>
|
<button name="t" value="file" class="clickable">Torrents</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
|
||||||
</form>
|
</form>
|
||||||
<!-- Results go here -->
|
<!-- Results go here -->
|
||||||
<p class="fetched fetched_dif fetched_vid"><!-- { fetched } --></p>
|
<p class="fetched fetched_dif fetched_vid"><!-- { fetched } --></p>
|
||||||
|
@ -64,9 +64,20 @@
|
||||||
</div>
|
</div>
|
||||||
{{ end }}
|
{{ end }}
|
||||||
{{ else }}
|
{{ else }}
|
||||||
<div class="no-results">No results found for '{{ .Query }}'. Try different keywords.</div>>
|
<div class="no-results">No results found for '{{ .Query }}'. Try different keywords.</div>
|
||||||
{{ end }}
|
{{ end }}
|
||||||
|
<div class="prev-next prev-img" id="prev-next">
|
||||||
|
<form action="/search" method="get">
|
||||||
|
<input type="hidden" name="q" value="{{ .Query }}">
|
||||||
|
<input type="hidden" name="t" value="video">
|
||||||
|
{{ if .HasPrevPage }}
|
||||||
|
<button type="submit" name="p" value="{{ sub .Page 1 }}">Previous</button>
|
||||||
|
{{ end }}
|
||||||
|
{{ if .HasNextPage }}
|
||||||
|
<button type="submit" name="p" value="{{ add .Page 1 }}">Next</button>
|
||||||
|
{{ end }}
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
<script>
|
<script>
|
||||||
// Check if JavaScript is enabled and modify the DOM accordingly
|
// Check if JavaScript is enabled and modify the DOM accordingly
|
||||||
document.getElementById('content').classList.remove('js-enabled');
|
document.getElementById('content').classList.remove('js-enabled');
|
||||||
|
|
78
text-brave.go
Normal file
78
text-brave.go
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/PuerkitoBio/goquery"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PerformBraveTextSearch performs a text search on Brave and returns the results.
|
||||||
|
func PerformBraveTextSearch(query, safe, lang string, offset int) ([]TextSearchResult, time.Duration, error) {
|
||||||
|
startTime := time.Now() // Start the timer
|
||||||
|
var results []TextSearchResult
|
||||||
|
|
||||||
|
// Build the search URL
|
||||||
|
searchURL := fmt.Sprintf("https://search.brave.com/search?q=%s&offset=%d", url.QueryEscape(query), offset)
|
||||||
|
|
||||||
|
req, err := http.NewRequest("GET", searchURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("creating request: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set headers including User-Agent
|
||||||
|
TextUserAgent, err := GetUserAgent("Text-Search")
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
req.Header.Set("User-Agent", TextUserAgent)
|
||||||
|
|
||||||
|
// Perform the HTTP request
|
||||||
|
client := &http.Client{}
|
||||||
|
resp, err := client.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("performing request: %v", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
// Read the response body
|
||||||
|
body, err := ioutil.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("reading response body: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the response body
|
||||||
|
doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(body)))
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("parsing response body: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract search results
|
||||||
|
doc.Find(".snippet").Each(func(i int, s *goquery.Selection) {
|
||||||
|
title := s.Find(".title").Text()
|
||||||
|
description := s.Find(".snippet-description").Text()
|
||||||
|
url, exists := s.Find("a").Attr("href")
|
||||||
|
|
||||||
|
// Add to results only if all components are present
|
||||||
|
if title != "" && description != "" && exists && url != "" {
|
||||||
|
results = append(results, TextSearchResult{
|
||||||
|
Header: title,
|
||||||
|
URL: url,
|
||||||
|
Description: description,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
duration := time.Since(startTime) // Calculate the duration
|
||||||
|
|
||||||
|
// Return an error if no results are found
|
||||||
|
if len(results) == 0 {
|
||||||
|
return nil, duration, fmt.Errorf("no results found")
|
||||||
|
}
|
||||||
|
|
||||||
|
return results, duration, nil
|
||||||
|
}
|
|
@ -2,7 +2,6 @@ package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"strings"
|
"strings"
|
||||||
|
@ -11,73 +10,25 @@ import (
|
||||||
"github.com/PuerkitoBio/goquery"
|
"github.com/PuerkitoBio/goquery"
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
|
||||||
resultsPerPage = 10
|
startTime := time.Now() // Start the timer
|
||||||
)
|
|
||||||
|
|
||||||
func getVQD(query string) (string, error) {
|
|
||||||
queryURL := fmt.Sprintf("https://duckduckgo.com/?q=%s", url.QueryEscape(query))
|
|
||||||
resp, err := http.Get(queryURL)
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("failed to fetch vqd: %v", err)
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("loading HTML document: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
var vqd string
|
|
||||||
doc.Find("script").Each(func(i int, s *goquery.Selection) {
|
|
||||||
text := s.Text()
|
|
||||||
if strings.Contains(text, "vqd=\"") {
|
|
||||||
start := strings.Index(text, "vqd=\"") + 5
|
|
||||||
end := strings.Index(text[start:], "\"")
|
|
||||||
vqd = text[start : start+end]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if vqd == "" {
|
|
||||||
return "", fmt.Errorf("vqd not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
return vqd, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
|
|
||||||
var results []TextSearchResult
|
var results []TextSearchResult
|
||||||
|
searchURL := buildDuckDuckGoSearchURL(query, page)
|
||||||
|
|
||||||
client := &http.Client{Timeout: 10 * time.Second}
|
resp, err := http.Get(searchURL)
|
||||||
|
|
||||||
vqd, err := getVQD(query)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("failed to get vqd: %v", err)
|
return nil, 0, fmt.Errorf("making request: %v", err)
|
||||||
}
|
|
||||||
|
|
||||||
searchURL := fmt.Sprintf("https://duckduckgo.com/html/?q=%s&kl=%s&safe=%s&s=%d&vqd=%s",
|
|
||||||
url.QueryEscape(query), lang, safe, (page-1)*resultsPerPage, vqd)
|
|
||||||
|
|
||||||
req, err := http.NewRequest("GET", searchURL, nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to create request: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36")
|
|
||||||
|
|
||||||
resp, err := client.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("making request: %v", err)
|
|
||||||
}
|
}
|
||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
|
|
||||||
if resp.StatusCode != http.StatusOK {
|
if resp.StatusCode != http.StatusOK {
|
||||||
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("loading HTML document: %v", err)
|
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
doc.Find(".result__body").Each(func(i int, s *goquery.Selection) {
|
doc.Find(".result__body").Each(func(i int, s *goquery.Selection) {
|
||||||
|
@ -94,34 +45,22 @@ func PerformDuckDuckGoTextSearch(query, safe, lang string, page int) ([]TextSear
|
||||||
URL: uddg,
|
URL: uddg,
|
||||||
Header: strings.TrimSpace(header),
|
Header: strings.TrimSpace(header),
|
||||||
Description: strings.TrimSpace(description),
|
Description: strings.TrimSpace(description),
|
||||||
Source: "DuckDuckGo",
|
|
||||||
}
|
}
|
||||||
results = append(results, result)
|
results = append(results, result)
|
||||||
if debugMode {
|
|
||||||
log.Printf("Processed DuckDuckGo result: %+v\n", result)
|
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
if debugMode {
|
|
||||||
log.Printf("Missing 'uddg' parameter in URL: %s\n", rawURL)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if debugMode {
|
|
||||||
log.Printf("Error parsing URL: %s, error: %v\n", rawURL, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if debugMode {
|
|
||||||
log.Printf("Missing 'href' attribute in result anchor tag\n")
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
if len(results) == 0 {
|
duration := time.Since(startTime) // Calculate the duration
|
||||||
if debugMode {
|
|
||||||
log.Println("No results found from DuckDuckGo")
|
return results, duration, nil
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return results, nil
|
func buildDuckDuckGoSearchURL(query string, page int) string {
|
||||||
|
startParam := ""
|
||||||
|
if page > 1 {
|
||||||
|
startParam = fmt.Sprintf("&s=%d", (page-1)*10)
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("https://duckduckgo.com/html/?q=%s%s", url.QueryEscape(query), startParam)
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,59 +6,59 @@ import (
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/PuerkitoBio/goquery"
|
"github.com/PuerkitoBio/goquery"
|
||||||
)
|
)
|
||||||
|
|
||||||
func PerformGoogleTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
|
func PerformGoogleTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
|
||||||
const resultsPerPage = 10
|
const resultsPerPage = 10
|
||||||
var results []TextSearchResult
|
var results []TextSearchResult
|
||||||
|
|
||||||
|
startTime := time.Now() // Start the timer
|
||||||
|
|
||||||
client := &http.Client{}
|
client := &http.Client{}
|
||||||
searchURL := buildSearchURL(query, safe, lang, page, resultsPerPage)
|
searchURL := buildSearchURL(query, safe, lang, page, resultsPerPage)
|
||||||
|
|
||||||
req, err := http.NewRequest("GET", searchURL, nil)
|
req, err := http.NewRequest("GET", searchURL, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("failed to create request: %v", err)
|
return nil, 0, fmt.Errorf("failed to create request: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// User Agent generation
|
// User Agent generation
|
||||||
TextUserAgent, err := GetUserAgent("Text-Search")
|
TextUserAgent, err := GetUserAgent("Text-Search")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Error:", err)
|
return nil, 0, err
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if debugMode {
|
|
||||||
fmt.Println("Generated User Agent (text):", TextUserAgent)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
req.Header.Set("User-Agent", TextUserAgent)
|
req.Header.Set("User-Agent", TextUserAgent)
|
||||||
|
|
||||||
resp, err := client.Do(req)
|
resp, err := client.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("making request: %v", err)
|
return nil, 0, fmt.Errorf("making request: %v", err)
|
||||||
}
|
}
|
||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
|
|
||||||
if resp.StatusCode != http.StatusOK {
|
if resp.StatusCode != http.StatusOK {
|
||||||
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
return nil, 0, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
doc, err := goquery.NewDocumentFromReader(resp.Body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("loading HTML document: %v", err)
|
return nil, 0, fmt.Errorf("loading HTML document: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
results = parseResults(doc)
|
results = parseResults(doc)
|
||||||
|
|
||||||
|
duration := time.Since(startTime) // Calculate the duration
|
||||||
|
|
||||||
if len(results) == 0 {
|
if len(results) == 0 {
|
||||||
if debugMode {
|
if debugMode {
|
||||||
log.Println("No results found from Google")
|
log.Println("No results found from Google")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return results, nil
|
return results, duration, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func buildSearchURL(query, safe, lang string, page, resultsPerPage int) string {
|
func buildSearchURL(query, safe, lang string, page, resultsPerPage int) string {
|
||||||
|
@ -104,9 +104,6 @@ func parseResults(doc *goquery.Document) []TextSearchResult {
|
||||||
Description: description,
|
Description: description,
|
||||||
}
|
}
|
||||||
results = append(results, result)
|
results = append(results, result)
|
||||||
if debugMode {
|
|
||||||
log.Printf("Google result: %+v\n", result)
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
|
@ -6,6 +6,7 @@ import (
|
||||||
"log"
|
"log"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
const LIBREX_DOMAIN = "librex.antopie.org"
|
const LIBREX_DOMAIN = "librex.antopie.org"
|
||||||
|
@ -18,40 +19,41 @@ type LibreXResult struct {
|
||||||
|
|
||||||
type LibreXResponse []LibreXResult
|
type LibreXResponse []LibreXResult
|
||||||
|
|
||||||
func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
|
func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchResult, time.Duration, error) {
|
||||||
// LibreX uses page starting from 0
|
startTime := time.Now() // Start the timer
|
||||||
searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page-1)
|
|
||||||
|
// LibreX/Y uses offset instead of page that starts at 0
|
||||||
|
page--
|
||||||
|
page = page * 10
|
||||||
|
|
||||||
|
searchURL := fmt.Sprintf("https://%s/api.php?q=%s&p=%d&t=0", LIBREX_DOMAIN, url.QueryEscape(query), page)
|
||||||
|
|
||||||
// User Agent generation
|
// User Agent generation
|
||||||
userAgent, err := GetUserAgent("librex-text-search")
|
userAgent, err := GetUserAgent("librex-text-search")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, 0, err
|
||||||
}
|
|
||||||
|
|
||||||
if debugMode {
|
|
||||||
log.Println("Generated User Agent (text):", userAgent)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
req, err := http.NewRequest("GET", searchURL, nil)
|
req, err := http.NewRequest("GET", searchURL, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, 0, err
|
||||||
}
|
}
|
||||||
req.Header.Set("User-Agent", userAgent)
|
req.Header.Set("User-Agent", userAgent)
|
||||||
|
|
||||||
client := &http.Client{}
|
client := &http.Client{}
|
||||||
resp, err := client.Do(req)
|
resp, err := client.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, logError("error making request to LibreX", err)
|
return nil, 0, logError("error making request to LibreX", err)
|
||||||
}
|
}
|
||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
|
|
||||||
if resp.StatusCode != http.StatusOK {
|
if resp.StatusCode != http.StatusOK {
|
||||||
return nil, logError("unexpected status code", fmt.Errorf("%d", resp.StatusCode))
|
return nil, 0, logError("unexpected status code", fmt.Errorf("%d", resp.StatusCode))
|
||||||
}
|
}
|
||||||
|
|
||||||
var librexResp LibreXResponse
|
var librexResp LibreXResponse
|
||||||
if err := json.NewDecoder(resp.Body).Decode(&librexResp); err != nil {
|
if err := json.NewDecoder(resp.Body).Decode(&librexResp); err != nil {
|
||||||
return nil, logError("error decoding LibreX response", err)
|
return nil, 0, logError("error decoding LibreX response", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
var results []TextSearchResult
|
var results []TextSearchResult
|
||||||
|
@ -63,16 +65,19 @@ func PerformLibreXTextSearch(query, safe, lang string, page int) ([]TextSearchRe
|
||||||
Source: "LibreX",
|
Source: "LibreX",
|
||||||
}
|
}
|
||||||
|
|
||||||
if debugMode {
|
|
||||||
log.Printf("LibreX result: %+v\n", result)
|
|
||||||
}
|
|
||||||
|
|
||||||
results = append(results, result)
|
results = append(results, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
return results, nil
|
duration := time.Since(startTime) // Calculate the duration
|
||||||
|
|
||||||
|
if len(results) == 0 {
|
||||||
|
return nil, duration, fmt.Errorf("no results found")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return results, duration, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is just stupid it will probbably lead to printing error twice
|
||||||
func logError(message string, err error) error {
|
func logError(message string, err error) error {
|
||||||
log.Printf("%s: %v", message, err)
|
log.Printf("%s: %v", message, err)
|
||||||
return fmt.Errorf("%s: %w", message, err)
|
return fmt.Errorf("%s: %w", message, err)
|
||||||
|
|
|
@ -3,7 +3,6 @@ package main
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"time"
|
"time"
|
||||||
|
@ -27,11 +26,9 @@ type QwantTextAPIResponse struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// PerformQwantTextSearch contacts the Qwant API and returns a slice of TextSearchResult
|
// PerformQwantTextSearch contacts the Qwant API and returns a slice of TextSearchResult
|
||||||
func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchResult, error) {
|
func PerformQwantTextSearch(query, safe, lang string) ([]TextSearchResult, error) {
|
||||||
const resultsPerPage = 10
|
const resultsPerPage = 10
|
||||||
|
const offset = 0
|
||||||
// Calculate the offset based on the page number
|
|
||||||
offset := (page - 1) * resultsPerPage
|
|
||||||
|
|
||||||
// Ensure safe search is disabled by default if not specified
|
// Ensure safe search is disabled by default if not specified
|
||||||
if safe == "" {
|
if safe == "" {
|
||||||
|
@ -43,12 +40,11 @@ func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchRes
|
||||||
lang = "en_CA"
|
lang = "en_CA"
|
||||||
}
|
}
|
||||||
|
|
||||||
apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/web?q=%s&count=%d&locale=%s&offset=%d&device=desktop&safesearch=%s",
|
apiURL := fmt.Sprintf("https://api.qwant.com/v3/search/web?q=%s&count=%d&locale=%s&offset=%d&device=desktop",
|
||||||
url.QueryEscape(query),
|
url.QueryEscape(query),
|
||||||
resultsPerPage,
|
resultsPerPage,
|
||||||
lang,
|
lang,
|
||||||
offset,
|
offset)
|
||||||
safe)
|
|
||||||
|
|
||||||
client := &http.Client{Timeout: 10 * time.Second}
|
client := &http.Client{Timeout: 10 * time.Second}
|
||||||
|
|
||||||
|
@ -97,9 +93,6 @@ func PerformQwantTextSearch(query, safe, lang string, page int) ([]TextSearchRes
|
||||||
func cleanQwantURL(rawURL string) string {
|
func cleanQwantURL(rawURL string) string {
|
||||||
u, err := url.Parse(rawURL)
|
u, err := url.Parse(rawURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if debugMode {
|
|
||||||
log.Printf("Error parsing URL: %v", err)
|
|
||||||
}
|
|
||||||
return rawURL
|
return rawURL
|
||||||
}
|
}
|
||||||
return u.Scheme + "://" + u.Host + u.Path
|
return u.Scheme + "://" + u.Host + u.Path
|
||||||
|
|
173
text.go
173
text.go
|
@ -1,44 +1,44 @@
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"flag"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"html/template"
|
"html/template"
|
||||||
"log"
|
"log"
|
||||||
"net/http"
|
"net/http"
|
||||||
"sort"
|
|
||||||
"sync"
|
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var textSearchEngines []SearchEngine
|
||||||
debugMode bool
|
|
||||||
)
|
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
flag.BoolVar(&debugMode, "debug", false, "enable debug mode")
|
textSearchEngines = []SearchEngine{
|
||||||
flag.Parse()
|
{Name: "Google", Func: wrapTextSearchFunc(PerformGoogleTextSearch), Weight: 1},
|
||||||
|
{Name: "LibreX", Func: wrapTextSearchFunc(PerformLibreXTextSearch), Weight: 2},
|
||||||
|
{Name: "Brave", Func: wrapTextSearchFunc(PerformBraveTextSearch), Weight: 2},
|
||||||
|
{Name: "DuckDuckGo", Func: wrapTextSearchFunc(PerformDuckDuckGoTextSearch), Weight: 5}, // DuckDuckGo timeouts too fast and search results are trash
|
||||||
|
// {Name: "SearXNG", Func: wrapTextSearchFunc(PerformSearXNGTextSearch), Weight: 2}, // Uncomment when implemented
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func HandleTextSearch(w http.ResponseWriter, query, safe, lang string, page int) {
|
func HandleTextSearch(w http.ResponseWriter, query, safe, lang string, page int) {
|
||||||
startTime := time.Now()
|
startTime := time.Now()
|
||||||
const resultsPerPage = 10
|
|
||||||
|
|
||||||
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
|
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
|
||||||
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, safe, lang, page, resultsPerPage)
|
combinedResults := getTextResultsFromCacheOrFetch(cacheKey, query, safe, lang, page)
|
||||||
|
|
||||||
hasPrevPage := page > 1
|
hasPrevPage := page > 1
|
||||||
hasNextPage := len(combinedResults) == resultsPerPage
|
hasNextPage := len(combinedResults) > 0
|
||||||
|
|
||||||
displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
|
displayResults(w, combinedResults, query, lang, time.Since(startTime).Seconds(), page, hasPrevPage, hasNextPage)
|
||||||
|
|
||||||
// Always check and cache the next page if not enough results
|
// Prefetch next and previous pages
|
||||||
if hasNextPage {
|
go prefetchPage(query, safe, lang, page+1)
|
||||||
go cacheNextPageIfNotCached(query, safe, lang, page+1, resultsPerPage)
|
if hasPrevPage {
|
||||||
|
go prefetchPage(query, safe, lang, page-1)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page, resultsPerPage int) []TextSearchResult {
|
func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string, page int) []TextSearchResult {
|
||||||
cacheChan := make(chan []SearchResult)
|
cacheChan := make(chan []SearchResult)
|
||||||
var combinedResults []TextSearchResult
|
var combinedResults []TextSearchResult
|
||||||
|
|
||||||
|
@ -56,136 +56,87 @@ func getTextResultsFromCacheOrFetch(cacheKey CacheKey, query, safe, lang string,
|
||||||
select {
|
select {
|
||||||
case results := <-cacheChan:
|
case results := <-cacheChan:
|
||||||
if results == nil {
|
if results == nil {
|
||||||
combinedResults = fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
|
combinedResults = fetchTextResults(query, safe, lang, page)
|
||||||
|
if len(combinedResults) > 0 {
|
||||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
textResults, _, _ := convertToSpecificResults(results)
|
textResults, _, _ := convertToSpecificResults(results)
|
||||||
combinedResults = textResults
|
combinedResults = textResults
|
||||||
}
|
}
|
||||||
case <-time.After(2 * time.Second):
|
case <-time.After(2 * time.Second):
|
||||||
log.Println("Cache check timeout")
|
log.Println("Cache check timeout")
|
||||||
combinedResults = fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
|
combinedResults = fetchTextResults(query, safe, lang, page)
|
||||||
|
if len(combinedResults) > 0 {
|
||||||
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
resultsCache.Set(cacheKey, convertToSearchResults(combinedResults))
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return combinedResults
|
return combinedResults
|
||||||
}
|
}
|
||||||
|
|
||||||
func cacheNextPageIfNotCached(query, safe, lang string, page, resultsPerPage int) {
|
func prefetchPage(query, safe, lang string, page int) {
|
||||||
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
|
cacheKey := CacheKey{Query: query, Page: page, Safe: safe == "true", Lang: lang, Type: "text"}
|
||||||
if _, exists := resultsCache.Get(cacheKey); !exists {
|
if _, exists := resultsCache.Get(cacheKey); !exists {
|
||||||
log.Printf("Next page %d not cached, caching now...", page)
|
log.Printf("Page %d not cached, caching now...", page)
|
||||||
nextPageResults := fetchTextResultsUntilFull(query, safe, lang, page, resultsPerPage)
|
pageResults := fetchTextResults(query, safe, lang, page)
|
||||||
resultsCache.Set(cacheKey, convertToSearchResults(nextPageResults))
|
if len(pageResults) > 0 {
|
||||||
|
resultsCache.Set(cacheKey, convertToSearchResults(pageResults))
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
log.Printf("Next page %d already cached", page)
|
log.Printf("Page %d already cached", page)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func fetchTextResultsUntilFull(query, safe, lang string, targetPage, resultsPerPage int) []TextSearchResult {
|
func fetchTextResults(query, safe, lang string, page int) []TextSearchResult {
|
||||||
var combinedResults []TextSearchResult
|
var results []TextSearchResult
|
||||||
currentPage := 1
|
|
||||||
resultsNeeded := targetPage * resultsPerPage
|
|
||||||
|
|
||||||
for len(combinedResults) < resultsNeeded {
|
for _, engine := range textSearchEngines {
|
||||||
cacheKey := CacheKey{Query: query, Page: targetPage, Safe: safe == "true", Lang: lang, Type: "text"}
|
log.Printf("Using search engine: %s", engine.Name)
|
||||||
cachedResults, exists := resultsCache.Get(cacheKey)
|
|
||||||
if exists {
|
searchResults, duration, err := engine.Func(query, safe, lang, page)
|
||||||
textResults, _, _ := convertToSpecificResults(cachedResults)
|
updateEngineMetrics(&engine, duration, err == nil)
|
||||||
combinedResults = append(combinedResults, textResults...)
|
if err != nil {
|
||||||
} else {
|
log.Printf("Error performing search with %s: %v", engine.Name, err)
|
||||||
results := fetchAndCacheTextResults(query, safe, lang, currentPage, resultsPerPage)
|
continue
|
||||||
if len(results) == 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
combinedResults = append(combinedResults, results...)
|
|
||||||
resultsCache.Set(cacheKey, convertToSearchResults(results))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
currentPage++
|
results = append(results, validateResults(searchResults)...)
|
||||||
|
|
||||||
// Stop fetching if we have enough results for the target page and the next page
|
// If results are found, break out of the loop
|
||||||
if len(combinedResults) >= resultsNeeded+resultsPerPage {
|
if len(results) > 0 {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
startIndex := (targetPage - 1) * resultsPerPage
|
return results
|
||||||
endIndex := startIndex + resultsPerPage
|
|
||||||
|
|
||||||
if startIndex >= len(combinedResults) {
|
|
||||||
return []TextSearchResult{}
|
|
||||||
}
|
|
||||||
if endIndex > len(combinedResults) {
|
|
||||||
endIndex = len(combinedResults)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return combinedResults[startIndex:endIndex]
|
func validateResults(searchResults []SearchResult) []TextSearchResult {
|
||||||
|
var validResults []TextSearchResult
|
||||||
|
|
||||||
|
// Remove anything that is missing a URL or Header
|
||||||
|
for _, result := range searchResults {
|
||||||
|
textResult := result.(TextSearchResult)
|
||||||
|
if textResult.URL != "" || textResult.Header != "" {
|
||||||
|
validResults = append(validResults, textResult)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func fetchAndCacheTextResults(query, safe, lang string, page, resultsPerPage int) []TextSearchResult {
|
return validResults
|
||||||
var combinedResults []TextSearchResult
|
|
||||||
var wg sync.WaitGroup
|
|
||||||
var mu sync.Mutex
|
|
||||||
|
|
||||||
resultsChan := make(chan []TextSearchResult)
|
|
||||||
|
|
||||||
searchFuncs := []struct {
|
|
||||||
Func func(string, string, string, int) ([]TextSearchResult, error)
|
|
||||||
Source string
|
|
||||||
}{
|
|
||||||
{PerformGoogleTextSearch, "Google"},
|
|
||||||
{PerformLibreXTextSearch, "LibreX"},
|
|
||||||
// {PerformSearXNGTextSearch, "SearXNG"},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
wg.Add(len(searchFuncs))
|
func wrapTextSearchFunc(f func(string, string, string, int) ([]TextSearchResult, time.Duration, error)) func(string, string, string, int) ([]SearchResult, time.Duration, error) {
|
||||||
|
return func(query, safe, lang string, page int) ([]SearchResult, time.Duration, error) {
|
||||||
for _, searchFunc := range searchFuncs {
|
textResults, duration, err := f(query, safe, lang, page)
|
||||||
go func(searchFunc func(string, string, string, int) ([]TextSearchResult, error), source string) {
|
if err != nil {
|
||||||
defer wg.Done()
|
return nil, duration, err
|
||||||
results, err := searchFunc(query, safe, lang, page)
|
|
||||||
if err == nil {
|
|
||||||
for i := range results {
|
|
||||||
results[i].Source = source
|
|
||||||
}
|
}
|
||||||
resultsChan <- results
|
searchResults := make([]SearchResult, len(textResults))
|
||||||
} else {
|
for i, result := range textResults {
|
||||||
log.Printf("Error performing search from %s: %v", source, err)
|
searchResults[i] = result
|
||||||
}
|
}
|
||||||
}(searchFunc.Func, searchFunc.Source)
|
return searchResults, duration, nil
|
||||||
}
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
wg.Wait()
|
|
||||||
close(resultsChan)
|
|
||||||
}()
|
|
||||||
|
|
||||||
for results := range resultsChan {
|
|
||||||
mu.Lock()
|
|
||||||
combinedResults = append(combinedResults, results...)
|
|
||||||
mu.Unlock()
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.SliceStable(combinedResults, func(i, j int) bool {
|
|
||||||
return sourceOrder(combinedResults[i].Source) < sourceOrder(combinedResults[j].Source)
|
|
||||||
})
|
|
||||||
|
|
||||||
log.Printf("Fetched %d results for page %d", len(combinedResults), page)
|
|
||||||
|
|
||||||
return combinedResults
|
|
||||||
}
|
|
||||||
|
|
||||||
func sourceOrder(source string) int {
|
|
||||||
switch source {
|
|
||||||
case "Google":
|
|
||||||
return 1
|
|
||||||
case "LibreX":
|
|
||||||
return 2
|
|
||||||
case "SearchXNG":
|
|
||||||
return 3
|
|
||||||
default:
|
|
||||||
return 4
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -216,6 +167,7 @@ func displayResults(w http.ResponseWriter, results []TextSearchResult, query, la
|
||||||
HasNextPage bool
|
HasNextPage bool
|
||||||
LanguageOptions []LanguageOption
|
LanguageOptions []LanguageOption
|
||||||
CurrentLang string
|
CurrentLang string
|
||||||
|
NoResults bool
|
||||||
}{
|
}{
|
||||||
Results: results,
|
Results: results,
|
||||||
Query: query,
|
Query: query,
|
||||||
|
@ -225,6 +177,7 @@ func displayResults(w http.ResponseWriter, results []TextSearchResult, query, la
|
||||||
HasNextPage: hasNextPage,
|
HasNextPage: hasNextPage,
|
||||||
LanguageOptions: languageOptions,
|
LanguageOptions: languageOptions,
|
||||||
CurrentLang: lang,
|
CurrentLang: lang,
|
||||||
|
NoResults: len(results) == 0,
|
||||||
}
|
}
|
||||||
|
|
||||||
err = tmpl.Execute(w, data)
|
err = tmpl.Execute(w, data)
|
||||||
|
|
11
video.go
11
video.go
|
@ -180,16 +180,23 @@ func handleVideoSearch(w http.ResponseWriter, query, safe, lang string, page int
|
||||||
}
|
}
|
||||||
|
|
||||||
elapsed := time.Since(start)
|
elapsed := time.Since(start)
|
||||||
tmpl, err := template.ParseFiles("templates/videos.html")
|
tmpl, err := template.New("videos.html").Funcs(funcs).ParseFiles("templates/videos.html")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Printf("Error parsing template: %v", err)
|
log.Printf("Error parsing template: %v", err)
|
||||||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
tmpl.Execute(w, map[string]interface{}{
|
err = tmpl.Execute(w, map[string]interface{}{
|
||||||
"Results": results,
|
"Results": results,
|
||||||
"Query": query,
|
"Query": query,
|
||||||
"Fetched": fmt.Sprintf("%.2f seconds", elapsed.Seconds()),
|
"Fetched": fmt.Sprintf("%.2f seconds", elapsed.Seconds()),
|
||||||
|
"Page": page,
|
||||||
|
"HasPrevPage": page > 1,
|
||||||
|
"HasNextPage": len(results) > 0, // assuming you have a way to determine if there are more pages
|
||||||
})
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error executing template: %v", err)
|
||||||
|
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue