This article demonstrates how to fetch a web page over HTTP in Go, sending the request through an HTTP proxy. It is shared here for your reference; the implementation is as follows:
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"net/url"
)

// Build a Transport that sends requests through the specified proxy address.
func getTransportFieldURL(proxy_addr *string) (transport *http.Transport) {
	url_proxy, err := url.Parse(*proxy_addr)
	if err != nil {
		log.Fatal(err.Error())
	}
	transport = &http.Transport{Proxy: http.ProxyURL(url_proxy)}
	return
}

// Build a Transport that reads the HTTP proxy address from the
// $http_proxy / $HTTP_PROXY environment variables.
func getTransportFromEnvironment() (transport *http.Transport) {
	transport = &http.Transport{Proxy: http.ProxyFromEnvironment}
	return
}

// Fetch the given URL through the proxy and return the response body as a string.
func fetch(url, proxy_addr *string) (html string) {
	transport := getTransportFieldURL(proxy_addr)
	client := &http.Client{Transport: transport}
	req, err := http.NewRequest("GET", *url, nil)
	if err != nil {
		log.Fatal(err.Error())
	}
	resp, err := client.Do(req)
	if err != nil {
		log.Fatal(err.Error())
	}
	if resp.StatusCode == 200 {
		robots, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			log.Fatal(err.Error())
		}
		html = string(robots)
	} else {
		html = ""
	}
	return
}

func main() {
	proxy_addr := "http://183.221.250.137:80/"
	url := "http://www.baidu.com/s"
	html := fetch(&url, &proxy_addr)
	fmt.Println(html)
}
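
The second helper, getTransportFromEnvironment, is not called in the program above. As a minimal sketch of how it could replace the hard-coded proxy, assuming the proxy address has been exported in the shell beforehand (for example, export HTTP_PROXY=http://183.221.250.137:80/), the main program could be written like this:

package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
)

// Same helper as above: the proxy is taken from $http_proxy / $HTTP_PROXY.
func getTransportFromEnvironment() (transport *http.Transport) {
	transport = &http.Transport{Proxy: http.ProxyFromEnvironment}
	return
}

func main() {
	client := &http.Client{Transport: getTransportFromEnvironment()}
	resp, err := client.Get("http://www.baidu.com/s")
	if err != nil {
		log.Fatal(err.Error())
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err.Error())
	}
	fmt.Println(string(body))
}

If no proxy environment variable is set, http.ProxyFromEnvironment simply returns no proxy and the request is made directly.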
Hopefully this article is helpful for your Go programming.