-
Notifications
You must be signed in to change notification settings - Fork 0
/
client.go
60 lines (48 loc) · 1.24 KB
/
client.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
package scour
import (
	"errors"
	"fmt"
	"net/http"
	"net/url"
)
// Client wraps an http.Client and remembers the last request and
// response exchanged through it.
//
// NOTE(review): because Request mutates req and resp on every call,
// a Client is presumably not safe for concurrent use — confirm with
// callers before sharing one across goroutines.
type Client struct {
	// Embedded client; its methods (Do, Get, ...) are promoted onto Client.
	*http.Client
	// req is the most recent request built by Request.
	req *http.Request
	// resp is the most recent response received by Request.
	resp *http.Response
}
// NewClient creates a new Client with an embedded http.Client.
// The req and resp fields start as empty values and are populated
// by the first call to Request.
func NewClient() *Client {
	// Named fields keep the literal valid even if the struct's
	// field order changes.
	return &Client{
		Client: &http.Client{},
		req:    &http.Request{},
		resp:   &http.Response{},
	}
}
// Request initiates a client request with the given HTTP method and
// target URL and returns a Crawler built from the response.
//
// The request and response are retained on the Client for later
// inspection. The response body is handed to the Crawler unclosed;
// the Crawler is expected to consume and close it.
func (c *Client) Request(method string, rawURL string) (ret *Crawler, err error) {
	// Parameter renamed from `url` so it no longer shadows the
	// imported net/url package.
	c.req, err = http.NewRequest(method, rawURL, nil)
	if err != nil {
		return nil, err
	}
	c.resp, err = c.Do(c.req)
	if err != nil {
		return nil, err
	}
	return NewCrawler(c.resp), nil
}
// Click is a convenience method simulating a client browser link
// click. It issues a request using the link's method and URL and
// returns the resulting Crawler.
//
// A nil link is rejected with an error rather than panicking.
func (c *Client) Click(l *Link) (ret *Crawler, err error) {
	if l == nil {
		// Error string follows Go convention: lowercase, no punctuation.
		return nil, errors.New("cannot click a nil link")
	}
	return c.Request(l.Method(), l.Url())
}
// SetProxy is a convenience method for routing all subsequent client
// requests through the proxy at proxyURL.
//
// It replaces any existing Transport with a fresh http.Transport
// whose only customization is the proxy, so previously configured
// transport settings are discarded.
func (c *Client) SetProxy(proxyURL string) error {
	proxy, err := url.Parse(proxyURL)
	if err != nil {
		// Wrap rather than discard the parse error so callers can
		// see what was actually wrong with the URL.
		return fmt.Errorf("parsing proxy url %q: %w", proxyURL, err)
	}
	c.Transport = &http.Transport{Proxy: http.ProxyURL(proxy)}
	return nil
}