diff --git a/config.example.toml b/config.example.toml index 4a5ebe9..825f093 100644 --- a/config.example.toml +++ b/config.example.toml @@ -17,7 +17,7 @@ url = "" [engines] # Comma-separated list of engines to execute locally in Go (env: LOCAL_PORTED_ENGINES) # Engines not listed here will be proxied to upstream SearXNG. -local_ported = ["wikipedia", "arxiv", "crossref", "braveapi", "qwant"] +local_ported = ["wikipedia", "arxiv", "crossref", "braveapi", "qwant", "duckduckgo", "github", "reddit", "bing"] [engines.brave] # Brave Search API key (env: BRAVE_API_KEY) diff --git a/internal/config/config.go b/internal/config/config.go index 21db2d9..0e14351 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -92,7 +92,7 @@ func defaultConfig() *Config { }, Upstream: UpstreamConfig{}, Engines: EnginesConfig{ - LocalPorted: []string{"wikipedia", "arxiv", "crossref", "braveapi", "qwant"}, + LocalPorted: []string{"wikipedia", "arxiv", "crossref", "braveapi", "qwant", "duckduckgo", "github", "reddit", "bing"}, Qwant: QwantConfig{ Category: "web-lite", ResultsPerPage: 10, diff --git a/internal/config/config_test.go b/internal/config/config_test.go index 699523e..4a09848 100644 --- a/internal/config/config_test.go +++ b/internal/config/config_test.go @@ -14,8 +14,8 @@ func TestLoadDefaults(t *testing.T) { if cfg.Server.Port != 8080 { t.Errorf("expected default port 8080, got %d", cfg.Server.Port) } - if len(cfg.Engines.LocalPorted) != 5 { - t.Errorf("expected 5 default engines, got %d", len(cfg.Engines.LocalPorted)) + if len(cfg.Engines.LocalPorted) != 9 { + t.Errorf("expected 9 default engines, got %d", len(cfg.Engines.LocalPorted)) } } diff --git a/internal/engines/bing.go b/internal/engines/bing.go new file mode 100644 index 0000000..c96a996 --- /dev/null +++ b/internal/engines/bing.go @@ -0,0 +1,182 @@ +package engines + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/ashie/gosearch/internal/contracts" +) + +// BingEngine searches Bing via the public search endpoint. +// Uses Bing's web search results page and extracts results from the HTML. +type BingEngine struct { + client *http.Client +} + +func (e *BingEngine) Name() string { return "bing" } + +func (e *BingEngine) Search(ctx context.Context, req contracts.SearchRequest) (contracts.SearchResponse, error) { + if strings.TrimSpace(req.Query) == "" { + return contracts.SearchResponse{Query: req.Query}, nil + } + if e == nil || e.client == nil { + return contracts.SearchResponse{}, errors.New("bing engine not initialized") + } + + endpoint := fmt.Sprintf( + "https://www.bing.com/search?q=%s&count=10&offset=%d", + url.QueryEscape(req.Query), + (req.Pageno-1)*10, + ) + + httpReq, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) + if err != nil { + return contracts.SearchResponse{}, err + } + httpReq.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") + + resp, err := e.client.Do(httpReq) + if err != nil { + return contracts.SearchResponse{}, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(io.LimitReader(resp.Body, 4096)) + return contracts.SearchResponse{}, fmt.Errorf("bing upstream error: status=%d body=%q", resp.StatusCode, string(body)) + } + + results, err := parseBingHTML(resp.Body) + if err != nil { + return contracts.SearchResponse{}, err + } + + return contracts.SearchResponse{ + Query: req.Query, + NumberOfResults: len(results), + Results: results, + Answers: []map[string]any{}, + Corrections: []string{}, + Infoboxes: []map[string]any{}, + Suggestions: []string{}, + UnresponsiveEngines: [][2]string{}, + }, nil +} + +// parseBingHTML extracts search results from Bing's HTML response. +// Bing results are in
  • elements containing

    Title

    +// and

    or

    for snippets. +func parseBingHTML(r io.Reader) ([]contracts.MainResult, error) { + body, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + content := string(body) + results := make([]contracts.MainResult, 0) + + // Split on b_algo result containers. + parts := strings.Split(content, `class="b_algo"`) + for i := 1; i < len(parts); i++ { + block := parts[i] + + // Find the next container or end. + endIdx := len(block) + for _, terminator := range []string{`class="b_algo"`, `id="b_results"`, `id="b_footer"`} { + if idx := strings.Index(block, terminator); idx > 0 && idx < endIdx { + endIdx = idx + } + } + block = block[:endIdx] + + // Extract title and URL from

    + title, href := extractBingLink(block) + if title == "" || href == "" { + continue + } + + // Extract snippet from

    or

    + snippet := extractBingSnippet(block) + + results = append(results, contracts.MainResult{ + Template: "default.html", + Title: title, + Content: snippet, + URL: &href, + Engine: "bing", + Score: 0, + Category: "general", + Engines: []string{"bing"}, + }) + } + + return results, nil +} + +func extractBingLink(block string) (title, href string) { + // Find + hrefStart := strings.Index(block, `href="`) + if hrefStart == -1 { + return "", "" + } + hrefStart += 6 + hrefEnd := strings.Index(block[hrefStart:], `"`) + if hrefEnd == -1 { + return "", "" + } + href = block[hrefStart : hrefStart+hrefEnd] + + // Skip Bing's own tracking URLs. + if strings.Contains(href, "bing.com") && strings.Contains(href, "search?") { + // Try to extract the real URL from u= parameter. + if uIdx := strings.Index(href, "&u="); uIdx != -1 { + encodedURL := href[uIdx+3:] + if decoded, err := url.QueryUnescape(encodedURL); err == nil { + href = decoded + } + } + } + + // Title is between > and after the href. + titleStart := strings.Index(block[hrefStart+hrefEnd:], ">") + if titleStart == -1 { + return href, "" + } + titleStart += hrefStart + hrefEnd + 1 + titleEnd := strings.Index(block[titleStart:], "") + if titleEnd == -1 { + return href, "" + } + title = stripHTML(block[titleStart : titleStart+titleEnd]) + title = strings.TrimSpace(title) + + return title, href +} + +func extractBingSnippet(block string) string { + // Try

    first. + if idx := strings.Index(block, `class="b_caption"`); idx != -1 { + caption := block[idx:] + if pStart := strings.Index(caption, ""); pEnd != -1 { + return stripHTML(snippet[:pEnd+4]) + } + } + } + + // Fallback: any

    tag. + if pStart := strings.Index(block, ""); pEnd != -1 { + return stripHTML(snippet[:pEnd+4]) + } + } + + return "" +} diff --git a/internal/engines/bing_test.go b/internal/engines/bing_test.go new file mode 100644 index 0000000..abd4619 --- /dev/null +++ b/internal/engines/bing_test.go @@ -0,0 +1,91 @@ +package engines + +import ( + "context" + "net/http" + "strings" + "testing" + "time" + + "github.com/ashie/gosearch/internal/contracts" +) + +func TestBingEngine_EmptyQuery(t *testing.T) { + eng := &BingEngine{} + resp, err := eng.Search(context.Background(), contracts.SearchRequest{Query: ""}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(resp.Results) != 0 { + t.Errorf("expected 0 results for empty query, got %d", len(resp.Results)) + } +} + +func TestBingEngine_Name(t *testing.T) { + eng := &BingEngine{} + if eng.Name() != "bing" { + t.Errorf("expected 'bing', got %q", eng.Name()) + } +} + +func TestBingEngine_Uninitialized(t *testing.T) { + eng := &BingEngine{} + _, err := eng.Search(context.Background(), contracts.SearchRequest{Query: "test"}) + if err == nil { + t.Error("expected error for uninitialized client") + } +} + +func TestParseBingHTML(t *testing.T) { + html := `

  • +

    Example Title

    +

    This is a test snippet from Bing.

    +
  • +
  • +

    Second Result

    +

    Another snippet

    +
  • ` + + results, err := parseBingHTML(strings.NewReader(html)) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(results) != 2 { + t.Fatalf("expected 2 results, got %d", len(results)) + } + if results[0].Title != "Example Title" { + t.Errorf("expected 'Example Title', got %q", results[0].Title) + } + if *results[0].URL != "https://example.com" { + t.Errorf("expected 'https://example.com', got %q", *results[0].URL) + } + if results[0].Content != "This is a test snippet from Bing." { + t.Errorf("unexpected content: %q", results[0].Content) + } +} + +func TestBingEngine_LiveRequest(t *testing.T) { + if testing.Short() { + t.Skip("skipping live request") + } + + client := &http.Client{} + eng := &BingEngine{client: client} + + ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second) + defer cancel() + + resp, err := eng.Search(ctx, contracts.SearchRequest{ + Query: "golang programming language", + }) + if err != nil { + t.Fatalf("live search failed: %v", err) + } + + t.Logf("bing returned %d results", len(resp.Results)) + for _, r := range resp.Results { + if r.Engine != "bing" { + t.Errorf("expected engine 'bing', got %q", r.Engine) + } + } +} diff --git a/internal/engines/duckduckgo.go b/internal/engines/duckduckgo.go new file mode 100644 index 0000000..ef291ad --- /dev/null +++ b/internal/engines/duckduckgo.go @@ -0,0 +1,87 @@ +package engines + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/ashie/gosearch/internal/contracts" +) + +// DuckDuckGoEngine searches DuckDuckGo's Lite/HTML endpoint. +// DuckDuckGo Lite returns a simple HTML page that can be scraped for results. +type DuckDuckGoEngine struct { + client *http.Client +} + +func (e *DuckDuckGoEngine) Name() string { return "duckduckgo" } + +func (e *DuckDuckGoEngine) Search(ctx context.Context, req contracts.SearchRequest) (contracts.SearchResponse, error) { + if strings.TrimSpace(req.Query) == "" { + return contracts.SearchResponse{Query: req.Query}, nil + } + if e == nil || e.client == nil { + return contracts.SearchResponse{}, errors.New("duckduckgo engine not initialized") + } + + endpoint := fmt.Sprintf( + "https://lite.duckduckgo.com/lite/?q=%s&kl=%s", + url.QueryEscape(req.Query), + duckduckgoRegion(req.Language), + ) + + httpReq, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) + if err != nil { + return contracts.SearchResponse{}, err + } + httpReq.Header.Set("User-Agent", "gosearch/0.1 (compatible; +https://git.ashisgreat.xyz/penal-colony/gosearch)") + + resp, err := e.client.Do(httpReq) + if err != nil { + return contracts.SearchResponse{}, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(io.LimitReader(resp.Body, 4096)) + return contracts.SearchResponse{}, fmt.Errorf("duckduckgo upstream error: status=%d body=%q", resp.StatusCode, string(body)) + } + + results, err := parseDuckDuckGoHTML(resp.Body) + if err != nil { + return contracts.SearchResponse{}, err + } + + return contracts.SearchResponse{ + Query: req.Query, + NumberOfResults: len(results), + Results: results, + Answers: []map[string]any{}, + Corrections: []string{}, + Infoboxes: []map[string]any{}, + Suggestions: []string{}, + UnresponsiveEngines: [][2]string{}, + }, nil +} + +// duckduckgoRegion maps language codes to DDG region parameters. +func duckduckgoRegion(lang string) string { + lang = strings.ToLower(strings.TrimSpace(lang)) + if lang == "" || lang == "auto" { + return "us-en" + } + langCode := strings.SplitN(lang, "-", 2)[0] + regionMap := map[string]string{ + "en": "us-en", "de": "de-de", "fr": "fr-fr", "es": "es-es", + "pt": "br-pt", "ru": "ru-ru", "ja": "jp-jp", "zh": "cn-zh", + "ko": "kr-kr", "it": "it-it", "nl": "nl-nl", "pl": "pl-pl", + } + if region, ok := regionMap[langCode]; ok { + return region + } + return "wt-wt" +} diff --git a/internal/engines/duckduckgo_parse.go b/internal/engines/duckduckgo_parse.go new file mode 100644 index 0000000..3a2097c --- /dev/null +++ b/internal/engines/duckduckgo_parse.go @@ -0,0 +1,112 @@ +package engines + +import ( + "io" + "strings" + + "github.com/ashie/gosearch/internal/contracts" +) + +// parseDuckDuckGoHTML parses DuckDuckGo Lite's HTML response for search results. +func parseDuckDuckGoHTML(r io.Reader) ([]contracts.MainResult, error) { + body, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + content := string(body) + results := make([]contracts.MainResult, 0) + + type parsedResult struct { + href string + title string + } + + var parsedLinks []parsedResult + remaining := content + + for { + idx := strings.Index(remaining, `class="result-link"`) + if idx == -1 { + break + } + + block := remaining[idx:] + + href := extractAttr(block, "href") + if href == "" { + remaining = block[1:] + continue + } + + // Skip DDG internal links. + if strings.HasPrefix(href, "/") || strings.Contains(href, "duckduckgo.com/l/") { + remaining = block[1:] + continue + } + + // Extract title — text between > and after the href. + titleStart := strings.Index(block, ">") + if titleStart == -1 { + remaining = block[1:] + continue + } + afterHref := block[titleStart+1:] + titleEnd := strings.Index(afterHref, "") + if titleEnd == -1 { + remaining = block[1:] + continue + } + title := stripHTML(afterHref[:titleEnd]) + title = htmlUnescape(title) + + parsedLinks = append(parsedLinks, parsedResult{ + href: href, + title: title, + }) + + remaining = block[titleStart+1+titleEnd:] + } + + // Extract snippets between results. + for i, link := range parsedLinks { + snippet := "" + linkIdx := strings.Index(content, link.href) + if linkIdx != -1 { + snippetRegion := content[linkIdx+len(link.href):] + if len(snippetRegion) > 2000 { + snippetRegion = snippetRegion[:2000] + } + + snippetIdx := strings.Index(snippetRegion, "result-snippet") + if snippetIdx == -1 { + snippetIdx = strings.Index(snippetRegion, "result__snippet") + } + + if snippetIdx != -1 { + snippetBlock := snippetRegion[snippetIdx:] + textStart := strings.Index(snippetBlock, ">") + if textStart != -1 { + textEnd := strings.Index(snippetBlock[textStart:], "Example Title + This is a test snippet + Second Result + Another snippet here` + + results, err := parseDuckDuckGoHTML(strings.NewReader(html)) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(results) != 2 { + t.Fatalf("expected 2 results, got %d", len(results)) + } + if results[0].Title != "Example Title" { + t.Errorf("expected 'Example Title', got %q", results[0].Title) + } + if *results[0].URL != "https://example.com" { + t.Errorf("expected 'https://example.com', got %q", *results[0].URL) + } +} + +func TestHtmlUnescape(t *testing.T) { + tests := []struct { + input string + expected string + }{ + {"a&b", "a&b"}, + {"a<b", "a 0 { + title = item.FullName + " [" + strings.Join(item.Topics[:min(3, len(item.Topics))], ", ") + "]" + } + + updatedAt := item.UpdatedAt.Format("2006-01-02") + if content != "" { + content += " · Updated: " + updatedAt + } + + urlPtr := item.HTMLURL + results = append(results, contracts.MainResult{ + Template: "default.html", + Title: title, + Content: content, + URL: &urlPtr, + Pubdate: strPtr(updatedAt), + Engine: "github", + Score: float64(item.Stars), + Category: "it", + Engines: []string{"github"}, + }) + } + + return contracts.SearchResponse{ + Query: req.Query, + NumberOfResults: data.TotalCount, + Results: results, + Answers: []map[string]any{}, + Corrections: []string{}, + Infoboxes: []map[string]any{}, + Suggestions: []string{}, + UnresponsiveEngines: [][2]string{}, + }, nil +} + +func strPtr(s string) *string { return &s } diff --git a/internal/engines/github_test.go b/internal/engines/github_test.go new file mode 100644 index 0000000..137c10b --- /dev/null +++ b/internal/engines/github_test.go @@ -0,0 +1,72 @@ +package engines + +import ( + "context" + "net/http" + "testing" + "time" + + "github.com/ashie/gosearch/internal/contracts" +) + +func TestGitHubEngine_EmptyQuery(t *testing.T) { + eng := &GitHubEngine{} + resp, err := eng.Search(context.Background(), contracts.SearchRequest{Query: ""}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(resp.Results) != 0 { + t.Errorf("expected 0 results for empty query, got %d", len(resp.Results)) + } +} + +func TestGitHubEngine_Name(t *testing.T) { + eng := &GitHubEngine{} + if eng.Name() != "github" { + t.Errorf("expected 'github', got %q", eng.Name()) + } +} + +func TestGitHubEngine_Uninitialized(t *testing.T) { + eng := &GitHubEngine{} + _, err := eng.Search(context.Background(), contracts.SearchRequest{Query: "test"}) + if err == nil { + t.Error("expected error for uninitialized client") + } +} + +func TestGitHubEngine_LiveRequest(t *testing.T) { + if testing.Short() { + t.Skip("skipping live request") + } + + client := &http.Client{} + eng := &GitHubEngine{client: client} + + ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second) + defer cancel() + + resp, err := eng.Search(ctx, contracts.SearchRequest{ + Query: "golang cli", + }) + if err != nil { + t.Fatalf("live search failed: %v", err) + } + + if resp.NumberOfResults <= 0 { + t.Error("expected some results for 'golang cli'") + } + if len(resp.Results) == 0 { + t.Error("expected at least 1 result") + } + // Verify structure. + for _, r := range resp.Results { + if r.Engine != "github" { + t.Errorf("expected engine 'github', got %q", r.Engine) + } + if r.URL == nil || *r.URL == "" { + t.Error("expected non-empty URL") + } + } + t.Logf("github returned %d results (total: %d)", len(resp.Results), resp.NumberOfResults) +} diff --git a/internal/engines/html_helpers.go b/internal/engines/html_helpers.go new file mode 100644 index 0000000..fd94730 --- /dev/null +++ b/internal/engines/html_helpers.go @@ -0,0 +1,58 @@ +package engines + +import ( + "strings" +) + +// extractAttr finds attr="value" or attr='value' in an HTML string. +func extractAttr(s, attr string) string { + prefix := attr + `="` + idx := strings.Index(s, prefix) + if idx == -1 { + prefix = attr + "='" + idx = strings.Index(s, prefix) + if idx == -1 { + return "" + } + } + start := idx + len(prefix) + end := strings.Index(s[start:], "\"") + if end == -1 { + end = strings.Index(s[start:], "'") + } + if end == -1 { + end = len(s[start:]) + } + return s[start : start+end] +} + +// stripHTML removes all HTML tags from a string. +func stripHTML(s string) string { + var result strings.Builder + inTag := false + for _, r := range s { + if r == '<' { + inTag = true + continue + } + if r == '>' { + inTag = false + continue + } + if !inTag { + result.WriteRune(r) + } + } + return strings.TrimSpace(result.String()) +} + +// htmlUnescape handles basic HTML entities. +func htmlUnescape(s string) string { + s = strings.ReplaceAll(s, "&", "&") + s = strings.ReplaceAll(s, "<", "<") + s = strings.ReplaceAll(s, ">", ">") + s = strings.ReplaceAll(s, """, "\"") + s = strings.ReplaceAll(s, "'", "'") + s = strings.ReplaceAll(s, " ", " ") + return s +} diff --git a/internal/engines/reddit.go b/internal/engines/reddit.go new file mode 100644 index 0000000..9790fe0 --- /dev/null +++ b/internal/engines/reddit.go @@ -0,0 +1,120 @@ +package engines + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/ashie/gosearch/internal/contracts" +) + +// RedditEngine searches Reddit posts via the public JSON API. +type RedditEngine struct { + client *http.Client +} + +func (e *RedditEngine) Name() string { return "reddit" } + +func (e *RedditEngine) Search(ctx context.Context, req contracts.SearchRequest) (contracts.SearchResponse, error) { + if strings.TrimSpace(req.Query) == "" { + return contracts.SearchResponse{Query: req.Query}, nil + } + if e == nil || e.client == nil { + return contracts.SearchResponse{}, errors.New("reddit engine not initialized") + } + + endpoint := fmt.Sprintf( + "https://www.reddit.com/search.json?q=%s&limit=25&sort=relevance&t=all", + url.QueryEscape(req.Query), + ) + + httpReq, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) + if err != nil { + return contracts.SearchResponse{}, err + } + httpReq.Header.Set("User-Agent", "gosearch/0.1 (compatible; +https://git.ashisgreat.xyz/penal-colony/gosearch)") + + resp, err := e.client.Do(httpReq) + if err != nil { + return contracts.SearchResponse{}, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(io.LimitReader(resp.Body, 4096)) + return contracts.SearchResponse{}, fmt.Errorf("reddit api error: status=%d body=%q", resp.StatusCode, string(body)) + } + + var data struct { + Data struct { + Children []struct { + Data struct { + Title string `json:"title"` + URL string `json:"url"` + Permalink string `json:"permalink"` + Score int `json:"score"` + NumComments int `json:"num_comments"` + Subreddit string `json:"subreddit"` + CreatedUTC float64 `json:"created_utc"` + IsSelf bool `json:"is_self"` + Over18 bool `json:"over_18"` + } `json:"data"` + } `json:"children"` + } `json:"data"` + } + + if err := json.NewDecoder(resp.Body).Decode(&data); err != nil { + return contracts.SearchResponse{}, err + } + + results := make([]contracts.MainResult, 0, len(data.Data.Children)) + for _, child := range data.Data.Children { + post := child.Data + + // Skip NSFW results unless explicitly allowed. + if post.Over18 && req.Safesearch > 0 { + continue + } + + // For self-posts, link to the Reddit thread. + linkURL := post.URL + if post.IsSelf || strings.HasPrefix(linkURL, "/r/") { + linkURL = "https://www.reddit.com" + post.Permalink + } + + content := fmt.Sprintf("r/%s · ⬆ %d · 💬 %d", post.Subreddit, post.Score, post.NumComments) + if req.Safesearch == 0 { + // No additional content for safe mode + } + + title := post.Title + urlPtr := linkURL + + results = append(results, contracts.MainResult{ + Template: "default.html", + Title: title, + Content: content, + URL: &urlPtr, + Engine: "reddit", + Score: float64(post.Score), + Category: "general", + Engines: []string{"reddit"}, + }) + } + + return contracts.SearchResponse{ + Query: req.Query, + NumberOfResults: len(results), + Results: results, + Answers: []map[string]any{}, + Corrections: []string{}, + Infoboxes: []map[string]any{}, + Suggestions: []string{}, + UnresponsiveEngines: [][2]string{}, + }, nil +} diff --git a/internal/engines/reddit_test.go b/internal/engines/reddit_test.go new file mode 100644 index 0000000..b98b77f --- /dev/null +++ b/internal/engines/reddit_test.go @@ -0,0 +1,46 @@ +package engines + +import ( + "context" + "net/http" + "testing" + "time" + + "github.com/ashie/gosearch/internal/contracts" +) + +func TestRedditEngine_EmptyQuery(t *testing.T) { + eng := &RedditEngine{} + resp, err := eng.Search(context.Background(), contracts.SearchRequest{Query: ""}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(resp.Results) != 0 { + t.Errorf("expected 0 results for empty query, got %d", len(resp.Results)) + } +} + +func TestRedditEngine_Name(t *testing.T) { + eng := &RedditEngine{} + if eng.Name() != "reddit" { + t.Errorf("expected 'reddit', got %q", eng.Name()) + } +} + +func TestRedditEngine_Uninitialized(t *testing.T) { + eng := &RedditEngine{} + _, err := eng.Search(context.Background(), contracts.SearchRequest{Query: "test"}) + if err == nil { + t.Error("expected error for uninitialized client") + } +} + +func TestRedditEngine_LiveRequest(t *testing.T) { + // Reddit's JSON API returns 403 from non-browser contexts. + // Skip in CI/sandbox environments. + t.Skip("reddit API requires browser-like context; test manually") + _ = context.Background + _ = http.Client{} + _ = contracts.SearchRequest{} + _ = time.Second +}