feat: Wikidata engine and Wikipedia knowledge infobox

- Add wikidata engine (wbsearchentities), tests, factory/planner/config
- Wikipedia REST summary: infobox from extract, thumbnail, article URL
- InfoboxView URL; render infobox list in results_inner + base styles
- Preferences Wikidata toggle; engine badge color for wikidata

Made-with: Cursor
ashisgreat22 2026-03-24 00:07:12 +01:00
parent 6e45abb150
commit 24577b27be
13 changed files with 344 additions and 34 deletions
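
For context on the new engine's upstream call: WikidataEngine.Search queries the wbsearchentities module of the Wikidata Action API. With the parameters set in the diff below, the request it builds looks like

    https://www.wikidata.org/w/api.php?action=wbsearchentities&search=<query>&language=<lang>&limit=10&format=json

and the only part of the response the engine reads is the search array, e.g. (this is the test fixture's payload, not live data):

    {"search":[{"id":"Q937","label":"Go","description":"Programming language"}]}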


@@ -50,6 +50,7 @@ func NewDefaultPortedEngines(client *http.Client, cfg *config.Config) map[string
return map[string]Engine{
"wikipedia": &WikipediaEngine{client: client},
"wikidata": &WikidataEngine{client: client},
"arxiv": &ArxivEngine{client: client},
"crossref": &CrossrefEngine{client: client},
"braveapi": &BraveAPIEngine{


@@ -24,7 +24,7 @@ import (
)
var defaultPortedEngines = []string{
"wikipedia", "arxiv", "crossref", "braveapi",
"wikipedia", "wikidata", "arxiv", "crossref", "braveapi",
"brave", "qwant", "duckduckgo", "github", "reddit",
"bing", "google", "youtube", "stackoverflow",
// Image engines
@@ -106,6 +106,7 @@ func inferFromCategories(categories []string) []string {
switch strings.TrimSpace(strings.ToLower(c)) {
case "general":
set["wikipedia"] = true
set["wikidata"] = true
set["braveapi"] = true
set["qwant"] = true
set["duckduckgo"] = true
@@ -134,9 +135,9 @@ func inferFromCategories(categories []string) []string {
}
// stable order
order := map[string]int{
"wikipedia": 0, "braveapi": 1, "brave": 2, "qwant": 3, "duckduckgo": 4, "bing": 5, "google": 6,
"arxiv": 7, "crossref": 8, "github": 9, "stackoverflow": 10, "reddit": 11, "youtube": 12,
"bing_images": 13, "ddg_images": 14, "qwant_images": 15,
"wikipedia": 0, "wikidata": 1, "braveapi": 2, "brave": 3, "qwant": 4, "duckduckgo": 5, "bing": 6, "google": 7,
"arxiv": 8, "crossref": 9, "github": 10, "stackoverflow": 11, "reddit": 12, "youtube": 13,
"bing_images": 14, "ddg_images": 15, "qwant_images": 16,
}
sortByOrder(out, order)
return out
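
As a quick illustration of the planner change (values hypothetical; this assumes sortByOrder sorts ascending by the mapped rank, per the "stable order" comment above): an engine set such as

    out := []string{"duckduckgo", "wikidata", "braveapi", "wikipedia"}

would come out of sortByOrder(out, order) as

    []string{"wikipedia", "wikidata", "braveapi", "duckduckgo"}

so wikidata results are planned right after wikipedia for "general" queries.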


@@ -0,0 +1,133 @@
// samsa — a privacy-respecting metasearch engine
// Copyright (C) 2026-present metamorphosis-dev
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
package engines
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"github.com/metamorphosis-dev/samsa/internal/contracts"
)
// wikidataAPIBase is the Wikidata MediaWiki API endpoint (overridable in tests).
var wikidataAPIBase = "https://www.wikidata.org/w/api.php"
// WikidataEngine searches entity labels and descriptions via the Wikidata API.
// See: https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities
type WikidataEngine struct {
client *http.Client
}
func (e *WikidataEngine) Name() string { return "wikidata" }
func (e *WikidataEngine) Search(ctx context.Context, req contracts.SearchRequest) (contracts.SearchResponse, error) {
if e == nil || e.client == nil {
return contracts.SearchResponse{}, errors.New("wikidata engine not initialized")
}
q := strings.TrimSpace(req.Query)
if q == "" {
return contracts.SearchResponse{Query: req.Query}, nil
}
lang := strings.TrimSpace(req.Language)
if lang == "" || lang == "auto" {
lang = "en"
}
lang = strings.ReplaceAll(lang, "_", "-") // normalize locale forms like "pt_BR" to "pt-BR"
lang = strings.SplitN(lang, "-", 2)[0]    // then keep only the primary language subtag
if _, ok := validWikipediaLangs[lang]; !ok {
lang = "en"
}
u, err := url.Parse(wikidataAPIBase)
if err != nil {
return contracts.SearchResponse{}, err
}
qv := u.Query()
qv.Set("action", "wbsearchentities")
qv.Set("search", q)
qv.Set("language", lang)
qv.Set("limit", "10")
qv.Set("format", "json")
u.RawQuery = qv.Encode()
httpReq, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
if err != nil {
return contracts.SearchResponse{}, err
}
httpReq.Header.Set("User-Agent", "samsa/1.0 (Wikidata search; +https://github.com/metamorphosis-dev/samsa)")
resp, err := e.client.Do(httpReq)
if err != nil {
return contracts.SearchResponse{}, err
}
defer resp.Body.Close()
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
io.Copy(io.Discard, io.LimitReader(resp.Body, 16*1024))
return contracts.SearchResponse{}, fmt.Errorf("wikidata upstream error: status %d", resp.StatusCode)
}
body, err := io.ReadAll(io.LimitReader(resp.Body, 2*1024*1024))
if err != nil {
return contracts.SearchResponse{}, err
}
var api struct {
Search []struct {
ID string `json:"id"`
Label string `json:"label"`
Description string `json:"description"`
} `json:"search"`
}
if err := json.Unmarshal(body, &api); err != nil {
return contracts.SearchResponse{}, fmt.Errorf("wikidata JSON parse error: %w", err)
}
results := make([]contracts.MainResult, 0, len(api.Search))
for _, hit := range api.Search {
id := strings.TrimSpace(hit.ID)
if id == "" || !strings.HasPrefix(id, "Q") {
continue
}
pageURL := "https://www.wikidata.org/wiki/" + url.PathEscape(id)
title := strings.TrimSpace(hit.Label)
if title == "" {
title = id
}
content := strings.TrimSpace(hit.Description)
urlPtr := pageURL
results = append(results, contracts.MainResult{
Template: "default.html",
Title: title,
Content: content,
URL: &urlPtr,
Engine: "wikidata",
Category: "general",
Engines: []string{"wikidata"},
})
}
return contracts.SearchResponse{
Query: req.Query,
NumberOfResults: len(results),
Results: results,
Answers: []map[string]any{},
Corrections: []string{},
Infoboxes: []map[string]any{},
Suggestions: []string{},
UnresponsiveEngines: [][2]string{},
}, nil
}
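
A minimal sketch of calling the new engine directly (illustrative only, imports elided; in the repo the engine is constructed by NewDefaultPortedEngines, and the query/result values here are hypothetical):

    e := &WikidataEngine{client: http.DefaultClient}
    resp, err := e.Search(context.Background(), contracts.SearchRequest{Query: "Douglas Adams", Language: "en"})
    if err != nil {
        // handle error
    }
    for _, r := range resp.Results {
        if r.URL != nil {
            fmt.Println(r.Title, *r.URL) // e.g. "Douglas Adams https://www.wikidata.org/wiki/Q42"
        }
    }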


@@ -0,0 +1,51 @@
package engines
import (
"context"
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/metamorphosis-dev/samsa/internal/contracts"
)
func TestWikidataEngine_Search(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Query().Get("action") != "wbsearchentities" {
t.Errorf("action=%q", r.URL.Query().Get("action"))
}
if got := r.URL.Query().Get("search"); got != "test" {
t.Errorf("search=%q want test", got)
}
w.Header().Set("Content-Type", "application/json")
_, _ = w.Write([]byte(`{"search":[{"id":"Q937","label":"Go","description":"Programming language"}]}`))
}))
defer ts.Close()
orig := wikidataAPIBase
t.Cleanup(func() { wikidataAPIBase = orig })
wikidataAPIBase = ts.URL + "/w/api.php"
e := &WikidataEngine{client: ts.Client()}
resp, err := e.Search(context.Background(), contracts.SearchRequest{
Query: "test",
Language: "en",
})
if err != nil {
t.Fatal(err)
}
if len(resp.Results) != 1 {
t.Fatalf("expected 1 result, got %d", len(resp.Results))
}
r0 := resp.Results[0]
if r0.Engine != "wikidata" {
t.Errorf("engine=%q", r0.Engine)
}
if r0.Title != "Go" {
t.Errorf("title=%q", r0.Title)
}
if r0.URL == nil || !strings.Contains(*r0.URL, "Q937") {
t.Errorf("url=%v", r0.URL)
}
}
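
The test is self-contained (it spins up an httptest server and swaps wikidataAPIBase for its URL), so it can be run from the module root with standard tooling:

    go test ./... -run TestWikidataEngine_Search -v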


@@ -123,13 +123,13 @@ func (e *WikipediaEngine) Search(ctx context.Context, req contracts.SearchReques
if resp.StatusCode == http.StatusNotFound {
return contracts.SearchResponse{
Query: req.Query,
NumberOfResults: 0,
Results: []contracts.MainResult{},
Answers: []map[string]any{},
Corrections: []string{},
Infoboxes: []map[string]any{},
Suggestions: []string{},
UnresponsiveEngines: [][2]string{},
}, nil
}
@@ -141,9 +141,13 @@ func (e *WikipediaEngine) Search(ctx context.Context, req contracts.SearchReques
var api struct {
Title string `json:"title"`
Description string `json:"description"`
Extract string `json:"extract"`
Titles struct {
Display string `json:"display"`
} `json:"titles"`
Thumbnail struct {
Source string `json:"source"`
} `json:"thumbnail"`
ContentURLs struct {
Desktop struct {
Page string `json:"page"`
@@ -160,7 +164,7 @@ func (e *WikipediaEngine) Search(ctx context.Context, req contracts.SearchReques
// API returned a non-standard payload; treat as no result.
return contracts.SearchResponse{
Query: req.Query,
NumberOfResults: 0,
Results: []contracts.MainResult{},
Answers: []map[string]any{},
Corrections: []string{},
@@ -175,36 +179,61 @@ func (e *WikipediaEngine) Search(ctx context.Context, req contracts.SearchReques
title = api.Title
}
content := api.Description
content := strings.TrimSpace(api.Extract)
if content == "" {
content = strings.TrimSpace(api.Description)
}
urlPtr := pageURL
pub := (*string)(nil)
// Knowledge infobox for HTML (Wikipedia REST summary: title, extract, thumbnail, link).
var infoboxes []map[string]any
ibTitle := api.Titles.Display
if ibTitle == "" {
ibTitle = api.Title
}
body := strings.TrimSpace(api.Extract)
if body == "" {
body = strings.TrimSpace(api.Description)
}
imgSrc := strings.TrimSpace(api.Thumbnail.Source)
if ibTitle != "" || body != "" || imgSrc != "" {
row := map[string]any{
"title": ibTitle,
"infobox": body,
"url": pageURL,
}
if imgSrc != "" {
row["img_src"] = imgSrc
}
infoboxes = append(infoboxes, row)
}
results := []contracts.MainResult{
{
Template: "default.html",
Title: title,
Content: content,
URL: &urlPtr,
Pubdate: pub,
Engine: "wikipedia",
Score: 0,
Category: "general",
Priority: "",
Template: "default.html",
Title: title,
Content: content,
URL: &urlPtr,
Pubdate: pub,
Engine: "wikipedia",
Score: 0,
Category: "general",
Priority: "",
Positions: nil,
Engines: []string{"wikipedia"},
},
}
return contracts.SearchResponse{
Query: req.Query,
NumberOfResults: len(results),
Results: results,
Answers: []map[string]any{},
Corrections: []string{},
Infoboxes: []map[string]any{},
Infoboxes: infoboxes,
Suggestions: []string{},
UnresponsiveEngines: [][2]string{},
}, nil
}
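
For reference, the infobox row emitted above is a plain map with the keys title, infobox, url and, when a thumbnail exists, img_src; for a typical summary response it would look roughly like this (values purely illustrative):

    Infoboxes: []map[string]any{{
        "title":   "Franz Kafka",
        "infobox": "Franz Kafka was a German-language novelist and short-story writer from Prague ...",
        "url":     "https://en.wikipedia.org/wiki/Franz_Kafka",
        "img_src": "https://upload.wikimedia.org/wikipedia/commons/thumb/...",
    }}

Per the commit message, InfoboxView and the results_inner template consume this list on the HTML side.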