package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strings"
)

// Client holds the per-language URLs used to search and fetch Wikipedia articles.
type Client struct {
	Lang    string // Wikipedia language code, e.g. "en"
	WikiUrl string // base URL for article pages
	ApiUrl  string // API base URL; the Load methods append params.Encode() directly, so this is expected to already end in "?"
}

// NewClient validates the language code against WikipediaLangs and builds
// the per-language wiki and API URLs from the unformatted host fragments.
func NewClient(lang string, unformattedWikiUrl string, unformattedApiUrl string) (*Client, error) {
	if _, ok := WikipediaLangs[lang]; !ok {
		return nil, fmt.Errorf("wikipedia language %s does not exist", lang)
	}
	client := &Client{
		Lang:    lang,
		WikiUrl: fmt.Sprintf("https://%s.%s", lang, unformattedWikiUrl),
		ApiUrl:  fmt.Sprintf("https://%s.%s", lang, unformattedApiUrl),
	}
	return client, nil
}
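
// A minimal construction sketch; the host fragments here are illustrative
// assumptions, not values taken from this repository:
//
//	client, err := NewClient("en", "wikipedia.org/wiki", "wikipedia.org/w/api.php?")
//	if err != nil {
//		log.Fatal(err)
//	}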

// fetch GETs apiUrl and decodes the JSON response into result, which must
// be a non-nil pointer to one of the WikipediaJSON response types.
func (c *Client) fetch(result WikipediaJSON, apiUrl string) error {
	resp, err := http.Get(apiUrl)
	if err != nil {
		return fmt.Errorf("couldn't fetch data from Wikipedia API: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected HTTP status: %s", resp.Status)
	}
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("couldn't read response body: %w", err)
	}
	// result already carries a pointer, so unmarshal into it directly.
	if err := json.Unmarshal(body, result); err != nil {
		return fmt.Errorf("couldn't decode JSON response: %w", err)
	}
	return nil
}

// LoadSearchList runs a full-text search for queryText and returns up to
// six results keyed by their position in the result list. An empty query
// or an empty result set returns a nil map and no error.
func (c *Client) LoadSearchList(queryText string) (map[int]Article, error) {
	if strings.TrimSpace(queryText) == "" {
		return nil, nil
	}
	params := url.Values{}
	params.Add("action", "query")
	params.Add("list", "search")
	params.Add("srsearch", queryText)
	params.Add("utf8", "")
	params.Add("format", "json")
	params.Add("srlimit", "6")
	params.Add("srprop", "snippet")
	apiUrl := c.ApiUrl + params.Encode()
	var result WikipediaPageQueryJSON
	if err := c.fetch(&result, apiUrl); err != nil {
		return nil, err
	}
	if len(result.Query.Search) == 0 {
		return nil, nil
	}
	articles := make(map[int]Article)
	for i, entry := range result.Query.Search {
		articles[i] = Article{
			Title: entry.Title,
			// TODO: use tea.Batch in the update loop to send multiple API
			// calls that get the page extract for each search result instead
			// of using the snippet (see the sketch after this function).
			// Might be able to replace cleaning entirely with "explaintext":
			// https://www.mediawiki.org/wiki/Extension:TextExtracts
			Description: CleanWikimediaHTML(entry.Snippet),
			Content:     "",
			Url:         fmt.Sprintf("%s/%s", c.WikiUrl, strings.ReplaceAll(entry.Title, " ", "_")),
		}
	}
	return articles, nil
}
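
// One possible shape for the tea.Batch TODO above, assuming the surrounding
// program is a Bubble Tea app; articleMsg, client, and m are hypothetical
// names, and LoadArticle stands in for a dedicated extract call:
//
//	cmds := make([]tea.Cmd, 0, len(articles))
//	for i, a := range articles {
//		i, a := i, a
//		cmds = append(cmds, func() tea.Msg {
//			full, err := client.LoadArticle(a)
//			return articleMsg{index: i, article: full, err: err}
//		})
//	}
//	return m, tea.Batch(cmds...)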

// LoadArticle fetches the latest revision's wikitext for article.Title and
// fills in the article's Title and Content.
func (c *Client) LoadArticle(article Article) (Article, error) {
	params := url.Values{}
	params.Add("action", "query")
	params.Add("formatversion", "2")
	params.Add("prop", "revisions")
	params.Add("rvprop", "content")
	params.Add("rvslots", "*")
	params.Add("titles", article.Title)
	params.Add("format", "json")
	apiUrl := c.ApiUrl + params.Encode()
	var result WikipediaPageJSON
	if err := c.fetch(&result, apiUrl); err != nil {
		return article, err
	}
	if len(result.Query.Pages) == 0 {
		return article, errors.New("no pages found")
	}
	page := result.Query.Pages[0]
	// Guard against pages with no revisions (e.g. missing titles) before
	// indexing, which would otherwise panic.
	if len(page.Revisions) == 0 {
		return article, errors.New("no revisions found")
	}
	article.Title = page.Title
	content := page.Revisions[0].Slots.Main.Content
	article.Content = CleanWikimediaHTML(content)
	return article, nil
}
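
// An end-to-end sketch tying search and load together; "gopher" is an
// arbitrary example query and the error handling is deliberately minimal:
//
//	articles, err := client.LoadSearchList("gopher")
//	if err != nil || len(articles) == 0 {
//		return
//	}
//	first, err := client.LoadArticle(articles[0])
//	if err != nil {
//		return
//	}
//	fmt.Println(first.Title, first.Url)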