Merge pull request 'bug fixes' (#10) from work into main

Reviewed-on: #10
This commit is contained in:
Internet Addict 2024-08-21 11:08:31 +00:00
commit a4285b8939
4 changed files with 45 additions and 31 deletions

View file

@@ -32,12 +32,12 @@ A self-hosted private and anonymous [metasearch engine](https://en.wikipedia.org
## Comparison to other search engines ## Comparison to other search engines
| Feature | Whoogle | Araa-Search | LibreY | 4get | *Warp* | | Feature | Whoogle [1] | Araa-Search | LibreY | 4get | *Warp* |
| :----------------------------------- | ------------------ | ------------------------- | ------------------------ | ------------------------ | ---------------------------------------------------- | | :----------------------------------- | ------------------ | ------------------------- | ------------------------ | ------------------------ | ---------------------------------------------------- |
| Works without JavaScript | ✅ | ✅ | ✅ | ✅ | ✅ | | Works without JavaScript | ✅ | ✅ | ✅ | ✅ | ✅ |
| Music search | ❓ | ❌ | ❌ | ✅ | ✅ | | Music search | ❓ | ❌ | ❌ | ✅ | ✅ |
| Torrent search | ❌ | ✅ | ✅ | ❌ | ✅ | | Torrent search | ❌ | ✅ | ✅ | ❌ | ✅ |
| API | ❌ | ✅ | | ✅ | ✅ | | API | ❌ | ✅ | ❓ [2] | ✅ | ✅ |
| Scalable | ❌ | ❌ | ❌ | ❌ | ✅ | | Scalable | ❌ | ❌ | ❌ | ❌ | ✅ |
| Not Resource Hungry | ❓ Moderate | ❌ Very resource hungry | ❌ Moderate 200-400mb~ | ❌ Moderate 200-400mb~ | ✅ about 15-20MiB at idle, 17-22MiB when searching | | Not Resource Hungry | ❓ Moderate | ❌ Very resource hungry | ❌ Moderate 200-400mb~ | ❌ Moderate 200-400mb~ | ✅ about 15-20MiB at idle, 17-22MiB when searching |
| Dynamic Page Loading | ❓ Not specified | ❌ | ❌ | ❌ | ✅ | | Dynamic Page Loading | ❓ Not specified | ❌ | ❌ | ❌ | ✅ |

View file

@@ -1,6 +1,7 @@
package main package main
import ( import (
"encoding/json"
"fmt" "fmt"
"net/http" "net/http"
"net/url" "net/url"
@@ -36,45 +37,52 @@ func PerformBingImageSearch(query, safe, lang string, page int) ([]ImageSearchRe
// Extract data using goquery // Extract data using goquery
var results []ImageSearchResult var results []ImageSearchResult
doc.Find(".imgpt").Each(func(i int, s *goquery.Selection) { doc.Find(".iusc").Each(func(i int, s *goquery.Selection) {
// Extract image source
imgTag := s.Find("img") imgTag := s.Find("img")
imgSrc, exists := imgTag.Attr("src") imgSrc, exists := imgTag.Attr("src")
if !exists { if !exists {
return imgSrc, exists = imgTag.Attr("data-src")
if !exists {
return
}
} }
title, _ := imgTag.Attr("alt") // Extract the image title from `alt` attribute
title := imgTag.AttrOr("alt", "")
// Extract width and height if available // Extract width and height if available
width, _ := strconv.Atoi(imgTag.AttrOr("width", "0")) width, _ := strconv.Atoi(imgTag.AttrOr("width", "0"))
height, _ := strconv.Atoi(imgTag.AttrOr("height", "0")) height, _ := strconv.Atoi(imgTag.AttrOr("height", "0"))
// Extract the original image URL from the `mediaurl` parameter in the link // Extract the m parameter (JSON-encoded image metadata)
pageLink, exists := s.Find("a.iusc").Attr("href") metadata, exists := s.Attr("m")
mediaURL := "" if !exists {
if exists { return
if u, err := url.Parse(pageLink); err == nil {
if mediaURLParam := u.Query().Get("mediaurl"); mediaURLParam != "" {
mediaURL, _ = url.QueryUnescape(mediaURLParam)
}
}
} }
results = append(results, ImageSearchResult{ // Parse the metadata to get the media URL (the original image source)
Thumbnail: imgSrc, var data map[string]interface{}
Title: strings.TrimSpace(title), if err := json.Unmarshal([]byte(metadata), &data); err == nil {
Media: imgSrc, mediaURL, ok := data["murl"].(string)
Width: width, if ok {
Height: height, results = append(results, ImageSearchResult{
Source: mediaURL, // Original image URL Thumbnail: imgSrc,
ThumbProxy: imgSrc, Title: strings.TrimSpace(title),
}) Media: mediaURL,
Width: width,
Height: height,
Source: mediaURL,
ThumbProxy: imgSrc,
})
}
}
}) })
duration := time.Since(startTime) duration := time.Since(startTime)
// Check if the number of results is one or less // Check if the number of results is one or less
if len(results) <= 1 { if len(results) == 0 {
return nil, duration, fmt.Errorf("no images found") return nil, duration, fmt.Errorf("no images found")
} }

View file

@@ -3,6 +3,7 @@ package main
import ( import (
"html/template" "html/template"
"net/http" "net/http"
"time"
) )
type UserSettings struct { type UserSettings struct {
@@ -39,26 +40,31 @@ func loadUserSettings(r *http.Request) UserSettings {
} }
func saveUserSettings(w http.ResponseWriter, settings UserSettings) { func saveUserSettings(w http.ResponseWriter, settings UserSettings) {
expiration := time.Now().Add(90 * 24 * time.Hour) // 90 days from now
http.SetCookie(w, &http.Cookie{ http.SetCookie(w, &http.Cookie{
Name: "theme", Name: "theme",
Value: settings.Theme, Value: settings.Theme,
Path: "/", Path: "/",
Secure: true, // Ensure cookie is sent over HTTPS only Expires: expiration, // Expiration time needs to be set otherwise it will expire immediately
SameSite: http.SameSiteNoneMode, // Set SameSite to None Secure: true, // Ensure cookie is sent over HTTPS only
SameSite: http.SameSiteStrictMode,
}) })
http.SetCookie(w, &http.Cookie{ http.SetCookie(w, &http.Cookie{
Name: "language", Name: "language",
Value: settings.Language, Value: settings.Language,
Path: "/", Path: "/",
Secure: true, // Ensure cookie is sent over HTTPS only Expires: expiration,
SameSite: http.SameSiteNoneMode, // Set SameSite to None Secure: true,
SameSite: http.SameSiteStrictMode,
}) })
http.SetCookie(w, &http.Cookie{ http.SetCookie(w, &http.Cookie{
Name: "safe", Name: "safe",
Value: settings.SafeSearch, Value: settings.SafeSearch,
Path: "/", Path: "/",
Secure: true, // Ensure cookie is sent over HTTPS only Expires: expiration,
SameSite: http.SameSiteNoneMode, // Set SameSite to None Secure: true,
SameSite: http.SameSiteStrictMode,
}) })
printDebug("settings saved: %v", settings) printDebug("settings saved: %v", settings)

2
video.go Executable file → Normal file
View file

@@ -206,7 +206,7 @@ func fetchVideoResults(query, safe, lang string, page int) []VideoResult {
Views: formatViews(item.Views), Views: formatViews(item.Views),
Creator: item.UploaderName, Creator: item.UploaderName,
Publisher: "Piped", Publisher: "Piped",
Image: fmt.Sprintf("/img_proxy?url=%s", url.QueryEscape(item.Thumbnail)), Image: item.Thumbnail, //fmt.Sprintf("/img_proxy?url=%s", url.QueryEscape(item.Thumbnail)), // Using image proxy is not working, but its not needed here as piped is proxy anyway
Duration: formatDuration(item.Duration), Duration: formatDuration(item.Duration),
}) })
} }