// scope/search/web/google.go
/*
* Scope - A simple and minimal metasearch engine
* Copyright (C) 2021 Arsen Musayelyan
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package web
import (
	"fmt"
	"net/http"
	"net/url"
	"strconv"

	"github.com/PuerkitoBio/goquery"
)
// googleURL is the base endpoint for Google web searches. Init copies
// it (via copyURL) before attaching query parameters, so this value is
// never mutated.
var googleURL = urlMustParse("https://www.google.com/search")
// Google represents the Google search engine
type Google struct {
	keyword   string             // search query, set via SetKeyword
	userAgent string             // User-Agent header; Init substitutes a default if empty
	page      int                // result offset sent as the "start" parameter (page * 10)
	doc       *goquery.Document  // parsed response document, populated by Init
	initDone  bool               // set true once Init completes successfully
	baseSel   *goquery.Selection // selection of result title elements (`a > h3`), populated by Init
}
// SetKeyword stores the search query that Init will send to Google.
func (g *Google) SetKeyword(kw string) {
	g.keyword = kw
}
// SetPage selects which page of results to request. The page number is
// stored as a result offset (10 results per page) matching Google's
// "start" query parameter; SetPage(0) therefore requests the first page.
func (g *Google) SetPage(p int) {
	g.page = p * 10
}
// SetUserAgent overrides the User-Agent header sent with the search
// request. Leaving it empty makes Init fall back to a built-in default.
func (g *Google) SetUserAgent(agent string) {
	g.userAgent = agent
}
// Init performs the search request against Google and parses the
// response body into a goquery document. It must be called after
// SetKeyword (and optionally SetPage/SetUserAgent) and before
// Each/Title/Link/Desc. It returns any request, HTTP-status, or
// parse error encountered.
func (g *Google) Init() error {
	// Copy the package-level URL so it is never mutated.
	initURL := copyURL(googleURL)
	// Build the query string: q is the search keyword, start is the
	// zero-based index of the first result (set by SetPage).
	query := initURL.Query()
	query.Set("q", g.keyword)
	query.Set("start", strconv.Itoa(g.page))
	initURL.RawQuery = query.Encode()
	// Create new request for modified URL
	req, err := http.NewRequest(
		http.MethodGet,
		initURL.String(),
		nil,
	)
	if err != nil {
		return err
	}
	// If no user agent was provided, fall back to a desktop Chrome UA.
	if g.userAgent == "" {
		g.userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
	}
	req.Header.Set("User-Agent", g.userAgent)
	// Perform request
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	// A non-200 response (e.g. a rate-limit or captcha page) would
	// otherwise be parsed silently and yield zero results; surface it
	// as an error instead.
	if res.StatusCode != http.StatusOK {
		return fmt.Errorf("google: unexpected response status %s", res.Status)
	}
	// Create goquery document from reader
	doc, err := goquery.NewDocumentFromReader(res.Body)
	if err != nil {
		return err
	}
	g.doc = doc
	// Result titles are <h3> elements directly inside result links.
	g.baseSel = doc.Find(`a > h3`)
	g.initDone = true
	return nil
}
// Each invokes eachCb once per parsed search result, passing the
// result's index. Iteration stops early and the error is returned as
// soon as eachCb fails; otherwise nil is returned.
func (g *Google) Each(eachCb func(int) error) error {
	total := g.baseSel.Length()
	for idx := 0; idx < total; idx++ {
		if err := eachCb(idx); err != nil {
			return err
		}
	}
	return nil
}
// Title returns the title text of the search result at index i.
// The error is always nil and exists to satisfy the engine interface.
func (g *Google) Title(i int) (string, error) {
	result := get(g.baseSel, i)
	return result.Text(), nil
}
// Link returns the destination URL of the search result at index i.
// The href lives on the <a> element wrapping the result's <h3> title;
// an empty string is returned when the attribute is missing.
func (g *Google) Link(i int) (string, error) {
	anchor := get(g.baseSel, i).Parent()
	return anchor.AttrOr("href", ""), nil
}
// Desc returns the description snippet of the search result at index
// i, taken from the sibling element that follows the result link's
// container in Google's markup.
func (g *Google) Desc(i int) (string, error) {
	container := get(g.baseSel, i).Parent().Parent()
	return container.Next().Text(), nil
}
// Name returns the identifier of this search engine, "google".
func (g *Google) Name() string {
	return "google"
}
// get returns the single element of sel at index i as a selection.
// Eq is used rather than Slice(i, i+1) because Slice panics on an
// out-of-range index, while Eq returns an empty selection — so the
// Title/Link/Desc accessors degrade to "" instead of crashing.
func get(sel *goquery.Selection, i int) *goquery.Selection {
	return sel.Eq(i)
}
// urlMustParse parses urlStr and panics if it is invalid. It follows
// the Must* convention (cf. regexp.MustCompile) and is intended only
// for package-level initialization with constant, known-good URLs —
// the original silently returned nil on error, which would surface
// later as a confusing nil-pointer dereference.
func urlMustParse(urlStr string) *url.URL {
	out, err := url.Parse(urlStr)
	if err != nil {
		panic(err)
	}
	return out
}
// copyURL makes a copy of the url and returns it
func copyURL(orig *url.URL) *url.URL {
newURL := new(url.URL)
*newURL = *orig
return newURL
}