|
@@ -0,0 +1,965 @@
|
|
|
+package mokuai
|
|
|
+
|
|
|
+import (
|
|
|
+ "bytes"
|
|
|
+ "encoding/json"
|
|
|
+ "fmt"
|
|
|
+ "github.com/gin-gonic/gin"
|
|
|
+ "github.com/tidwall/gjson"
|
|
|
+ "golang.org/x/net/proxy"
|
|
|
+ "io"
|
|
|
+ "io/ioutil"
|
|
|
+ "math/big"
|
|
|
+ "net"
|
|
|
+ "net/http"
|
|
|
+ "net/url"
|
|
|
+ "os"
|
|
|
+ "strings"
|
|
|
+ "time"
|
|
|
+)
|
|
|
+
|
|
|
+
|
|
|
// Net_UrlGet performs an HTTP GET against url with a 20-second timeout and
// returns the response body as a string.
//
// On any error (connection failure or read failure) it logs the error and
// returns "" instead of panicking; the original panicked before its recover
// was even registered, so transport errors escaped to the caller.
func Net_UrlGet(url string) string {
	client := &http.Client{Timeout: 20 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		fmt.Printf("Net_UrlGet error: %v\r\n", err)
		return ""
	}
	defer resp.Body.Close()

	// Read the whole body at once; replaces the hand-rolled 512-byte loop.
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Printf("Net_UrlGet read error: %v\r\n", err)
		return ""
	}
	return string(body)
}
|
|
|
+
|
|
|
+
|
|
|
// Net_UrlGetByte performs an HTTP GET against url with a timeout of outtime
// seconds (values < 1 fall back to 5s) and returns the response body as bytes.
//
// On any error it logs and returns an empty slice instead of panicking; the
// original panicked before its recover was registered, so transport errors
// escaped to the caller and its `return []byte("")` was unreachable.
func Net_UrlGetByte(url string, outtime int) []byte {
	if outtime < 1 {
		outtime = 5
	}
	client := &http.Client{Timeout: time.Duration(outtime) * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		fmt.Printf("Net_UrlGetByte error: %v\r\n", err)
		return []byte("")
	}
	defer resp.Body.Close()

	// Read the whole body at once; replaces the hand-rolled 512-byte loop.
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Printf("Net_UrlGetByte read error: %v\r\n", err)
		return []byte("")
	}
	return body
}
|
|
|
+
|
|
|
+
|
|
|
// Net_UrlPostJson_Str JSON-encodes data and POSTs it to url with the given
// Content-Type, returning the response body as a string.
//
// Returns "" on marshal, transport, or read errors (logged); the original
// silently ignored the marshal error and panicked on transport errors.
func Net_UrlPostJson_Str(url string, data interface{}, contentType string) string {
	client := &http.Client{Timeout: 10 * time.Second}
	jsonStr, err := json.Marshal(data)
	if err != nil {
		fmt.Printf("Net_UrlPostJson_Str marshal error: %v\r\n", err)
		return ""
	}
	resp, err := client.Post(url, contentType, bytes.NewBuffer(jsonStr))
	if err != nil {
		fmt.Printf("Net_UrlPostJson_Str error: %v\r\n", err)
		return ""
	}
	defer resp.Body.Close()

	result, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Printf("Net_UrlPostJson_Str read error: %v\r\n", err)
		return ""
	}
	return string(result)
}
|
|
|
+
|
|
|
+
|
|
|
// Net_UrlPostStr_Str POSTs the raw string data to url with the given
// Content-Type and returns the response body as a string.
//
// Returns "" on transport or read errors (logged); the original panicked
// on transport errors before its recover was registered.
func Net_UrlPostStr_Str(url string, data string, contentType string) string {
	client := &http.Client{Timeout: 10 * time.Second}
	resp, err := client.Post(url, contentType, bytes.NewBuffer([]byte(data)))
	if err != nil {
		fmt.Printf("Net_UrlPostStr_Str error: %v\r\n", err)
		return ""
	}
	defer resp.Body.Close()

	result, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Printf("Net_UrlPostStr_Str read error: %v\r\n", err)
		return ""
	}
	return string(result)
}
|
|
|
+
|
|
|
+
|
|
|
// Net_UrlPostJson_Byte JSON-encodes data and POSTs it to url with the given
// Content-Type, returning the response body as bytes.
//
// Returns an empty slice on marshal, transport, or read errors (logged); the
// original panicked on transport errors and its `return []byte("")` after the
// panic was unreachable.
func Net_UrlPostJson_Byte(url string, data interface{}, contentType string) []byte {
	client := &http.Client{Timeout: 20 * time.Second}
	jsonStr, err := json.Marshal(data)
	if err != nil {
		fmt.Printf("Net_UrlPostJson_Byte marshal error: %v\r\n", err)
		return []byte("")
	}
	resp, err := client.Post(url, contentType, bytes.NewBuffer(jsonStr))
	if err != nil {
		fmt.Printf("Net_UrlPostJson_Byte error: %v\r\n", err)
		return []byte("")
	}
	defer resp.Body.Close()

	result, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Printf("Net_UrlPostJson_Byte read error: %v\r\n", err)
		return []byte("")
	}
	return result
}
|
|
|
+
|
|
|
+
|
|
|
// Net_UrlPostStr_Byte POSTs the raw string data to url with the given
// Content-Type and returns the response body as bytes.
//
// Returns an empty slice on transport or read errors (logged); the original
// panicked on transport errors before its recover was registered.
func Net_UrlPostStr_Byte(url string, data string, contentType string) []byte {
	client := &http.Client{Timeout: 20 * time.Second}
	resp, err := client.Post(url, contentType, bytes.NewBuffer([]byte(data)))
	if err != nil {
		fmt.Printf("Net_UrlPostStr_Byte error: %v\r\n", err)
		return []byte("")
	}
	defer resp.Body.Close()

	result, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Printf("Net_UrlPostStr_Byte read error: %v\r\n", err)
		return []byte("")
	}
	return result
}
|
|
|
+
|
|
|
+
|
|
|
// Net_GetIPmaskByName returns the first address containing a '.' (taken to be
// IPv4, in CIDR "ip/mask" form) bound to the named network interface.
//
// Returns "" when the interface does not exist, its addresses cannot be
// listed, or it has no IPv4 address. The original ignored both errors and
// dereferenced a nil *net.Interface for a nonexistent interface name.
func Net_GetIPmaskByName(intername string) string {
	byName, err := net.InterfaceByName(intername)
	if err != nil {
		return ""
	}
	addresses, err := byName.Addrs()
	if err != nil {
		return ""
	}
	for _, vv := range addresses {
		if strings.IndexAny(vv.String(), ".") != -1 {
			return vv.String()
		}
	}
	return ""
}
|
|
|
+
|
|
|
+
|
|
|
// Net_GetIPByName returns the first IPv4 address (without the /mask suffix)
// bound to the named network interface, or "" if the interface does not
// exist or has no IPv4 address.
//
// The original ignored the InterfaceByName/Addrs errors (nil deref for a
// missing interface) and would panic on s[:-1] if an address had no "/".
func Net_GetIPByName(intername string) string {
	byName, err := net.InterfaceByName(intername)
	if err != nil {
		return ""
	}
	addresses, err := byName.Addrs()
	if err != nil {
		return ""
	}
	for _, vv := range addresses {
		s := vv.String()
		// An address containing '.' is taken to be IPv4.
		if strings.IndexAny(s, ".") != -1 {
			if comma := strings.Index(s, "/"); comma != -1 {
				return s[:comma]
			}
			return s
		}
	}
	return ""
}
|
|
|
+
|
|
|
+
|
|
|
+func Net_DebugPostData(c *gin.Context) {
|
|
|
+ data, _ := c.GetRawData()
|
|
|
+ fmt.Printf("Post Data: %v\n", string(data))
|
|
|
+ c.Request.Body = ioutil.NopCloser(bytes.NewBuffer(data))
|
|
|
+}
|
|
|
+
|
|
|
+
|
|
|
+func Net_Getmyfwqip() (ipwai, ipnei string) {
|
|
|
+ addrs, err := net.InterfaceAddrs()
|
|
|
+
|
|
|
+ if err != nil {
|
|
|
+ fmt.Println("Net_Getmyfwqip Err:", err)
|
|
|
+ os.Exit(1)
|
|
|
+ }
|
|
|
+
|
|
|
+ for _, address := range addrs {
|
|
|
+
|
|
|
+
|
|
|
+ if ipnet, ok := address.(*net.IPNet); ok && !ipnet.IP.IsLoopback() {
|
|
|
+ if ipnet.IP.To4() != nil {
|
|
|
+
|
|
|
+ if Net_IsPublicIP(ipnet.IP) == true {
|
|
|
+
|
|
|
+ ipwai = ipnet.IP.String()
|
|
|
+ } else if Net_Isothernet(ipnet.IP) == false {
|
|
|
+
|
|
|
+ ipnei = ipnet.IP.String()
|
|
|
+ } else {
|
|
|
+
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ return
|
|
|
+}
|
|
|
+
|
|
|
+
|
|
|
// Net_IsPublicIP reports whether IP is a publicly routable IPv4 address.
// Loopback, link-local addresses and the RFC 1918 private ranges yield
// false, as does anything that is not an IPv4 address.
func Net_IsPublicIP(IP net.IP) bool {
	if IP.IsLoopback() || IP.IsLinkLocalMulticast() || IP.IsLinkLocalUnicast() {
		return false
	}
	ip4 := IP.To4()
	if ip4 == nil {
		// Not IPv4 (e.g. IPv6) — treated as non-public here.
		return false
	}
	// RFC 1918 private ranges: 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16.
	if ip4[0] == 10 {
		return false
	}
	if ip4[0] == 172 && ip4[1] >= 16 && ip4[1] <= 31 {
		return false
	}
	if ip4[0] == 192 && ip4[1] == 168 {
		return false
	}
	return true
}
|
|
|
+
|
|
|
+
|
|
|
// Net_Isothernet reports whether IP falls into the "other" network ranges
// this package special-cases: 150.33.0.0/16 and the link-local block
// 169.254.0.0/16. Any non-IPv4 address also reports true.
func Net_Isothernet(IP net.IP) bool {
	ip4 := IP.To4()
	if ip4 == nil {
		// Non-IPv4 addresses are treated as "other".
		return true
	}
	switch {
	case ip4[0] == 150 && ip4[1] == 33:
		return true
	case ip4[0] == 169 && ip4[1] == 254:
		return true
	default:
		return false
	}
}
|
|
|
+
|
|
|
+
|
|
|
+func Net_Getmyip_api() (ip, area string) {
|
|
|
+ s := Net_UrlGet("http://myip.zzznb.cc")
|
|
|
+ ip = gjson.Get(s, "ip").Str
|
|
|
+ if ip == "" {
|
|
|
+ res, _ := http.Get("http://tom.myip.top")
|
|
|
+ s, _ := ioutil.ReadAll(res.Body)
|
|
|
+ ip = gjson.Get(string(s), "ip").Str
|
|
|
+ }
|
|
|
+ if ip == "" {
|
|
|
+ res, _ := http.Get("http://ky.myip.top")
|
|
|
+ s, _ := ioutil.ReadAll(res.Body)
|
|
|
+ ip = gjson.Get(string(s), "ip").Str
|
|
|
+ }
|
|
|
+ area = gjson.Get(s, "country").Str + " " + gjson.Get(s, "province").Str + " " + gjson.Get(s, "city").Str + " " + gjson.Get(s, "isp").Str
|
|
|
+
|
|
|
+ return
|
|
|
+}
|
|
|
+
|
|
|
+
|
|
|
/*
使用HTTP代理GET访问(可以设置超时时间)
|
|
|
+urlstr 要访问的URL
|
|
|
+prxyurl 代理服务器地址
|
|
|
+outtime 超时时长,单位 秒
|
|
|
+
|
|
|
+返回
|
|
|
+body:接收的数据
|
|
|
+err:错误信息
|
|
|
+*/
|
|
|
// Net_UrlProxyGet performs an HTTP GET on urlstr through the given HTTP
// proxy, with a timeout of outtime seconds, and returns the response body.
//
// NOTE(review): the parameter name "proxy" shadows the imported
// golang.org/x/net/proxy package inside this function.
func Net_UrlProxyGet(urlstr, proxy string, outtime int) (body []byte, err error) {
	urli := url.URL{}
	var proxylin string
	// Prepend the scheme when the caller passed a bare host:port.
	if strings.Index(proxy, "http://") == -1 {
		proxylin = "http://" + proxy
	} else {
		proxylin = proxy
	}

	// Parse error is ignored here; an unparsable proxy address is silently
	// passed through as a nil-ish proxy URL. TODO confirm this is intended.
	urlproxy, _ := urli.Parse(proxylin)
	client := &http.Client{
		Timeout: time.Second * time.Duration(outtime),
		Transport: &http.Transport{
			Proxy: http.ProxyURL(urlproxy),
		},
	}
	var rqt *http.Request
	rqt, err = http.NewRequest("GET", urlstr, nil)
	if err != nil {
		return
	}

	var response *http.Response
	response, err = client.Do(rqt)
	if response != nil {
		// Close the body even on error responses.
		defer response.Body.Close()
	}

	if err != nil {
		return []byte(""), err
	}

	body, err = ioutil.ReadAll(response.Body)
	// NOTE(review): this recover is registered after every statement that
	// could panic has already run, so it is effectively inert.
	defer func() {
		if e := recover(); e != nil {
			fmt.Printf("Panicing %s\r\n", e)
		}
	}()
	return
}
|
|
|
+
|
|
|
+
|
|
|
/*
使用Socks5代理GET访问
|
|
|
+urlstr 要访问的URL
|
|
|
+outtime 超时时长,单位 秒
|
|
|
+prxyurl 代理服务器地址
|
|
|
+
|
|
|
+返回
|
|
|
+body:接收的数据
|
|
|
+err:错误信息
|
|
|
+*/
|
|
|
// Net_UrlProxyS5Get performs an HTTP GET on urlstr through a SOCKS5 proxy
// at prxyurl, with username/password credentials (may be empty) and a
// timeout of outtime seconds. Returns the response body and any error.
func Net_UrlProxyS5Get(urlstr string, outtime int, prxyurl, user, passwd string) (body []byte, err error) {

	// Build a SOCKS5 dialer with the supplied credentials, falling back to
	// a direct connection for the proxy handshake itself.
	var authtmp proxy.Auth
	authtmp.User = user
	authtmp.Password = passwd
	dialer, err := proxy.SOCKS5("tcp", prxyurl, &authtmp, proxy.Direct)
	if err != nil {

		return
	}

	client := &http.Client{
		Timeout: time.Second * time.Duration(outtime),
		Transport: &http.Transport{
			// Route every connection through the SOCKS5 dialer.
			Dial: dialer.Dial,
		},
	}
	var rqt *http.Request
	rqt, err = http.NewRequest("GET", urlstr, nil)
	if err != nil {
		return
	}

	var response *http.Response
	response, err = client.Do(rqt)
	if response != nil {
		// Close the body even on error responses.
		defer response.Body.Close()
	}

	// Recovers a panic raised by the statements below (err check / ReadAll).
	defer func() {
		if e := recover(); e != nil {
			fmt.Printf("Panicing %s\r\n", e)
		}
	}()

	if err != nil {
		return
	}

	body, err = ioutil.ReadAll(response.Body)
	return
}
|
|
|
+
|
|
|
+
|
|
|
/*
使用HTTP代理POST访问(可以设置超时时间)
|
|
|
+urlstr 要访问的URL
|
|
|
+data POST数据
|
|
|
+prxyurl 代理服务器地址
|
|
|
+outtime 超时时长,单位 秒
|
|
|
+headers 协议头 如 headers := make(map[string]string)
|
|
|
+
|
|
|
+ headers["Content-Type"] = "application/json;charset=utf-8"
|
|
|
+ headers["token"] = token
|
|
|
+ headers["Connection"] = "keep-alive"
|
|
|
+ headers["Accept"] = "*\/*" 去掉\
|
|
|
+ headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36"
|
|
|
+ headers["X-Requested-With"] = "XMLHttpRequest"
|
|
|
+ headers["Referer"] = "http://www.xxxx.com/"
|
|
|
+
|
|
|
+返回
|
|
|
+body:接收的数据
|
|
|
+err:错误信息
|
|
|
+Recookies 返回的Cookies
|
|
|
+*/
|
|
|
// Net_UrlProxyPost performs an HTTP POST of data (raw string body) to
// urlstr through the given HTTP proxy, applying the supplied headers
// verbatim. Returns the response body, an error, and the cookies set by
// the response.
//
// NOTE(review): the parameter name "proxy" shadows the imported
// golang.org/x/net/proxy package inside this function.
func Net_UrlProxyPost(urlstr, data, proxy string, outtime int, headers map[string]string) (body []byte, err error, Recookies []*http.Cookie) {
	urli := url.URL{}
	var proxylin string
	// Prepend the scheme when the caller passed a bare host:port.
	if strings.Index(proxy, "http://") == -1 {
		proxylin = "http://" + proxy
	} else {
		proxylin = proxy
	}

	// Parse error is ignored here. TODO confirm this is intended.
	urlproxy, _ := urli.Parse(proxylin)
	client := &http.Client{
		Timeout: time.Second * time.Duration(outtime),
		Transport: &http.Transport{
			Proxy: http.ProxyURL(urlproxy),
		},
	}
	var rqt *http.Request
	rqt, err = http.NewRequest("POST", urlstr, bytes.NewReader([]byte(data)))
	if err != nil {
		return
	}

	// Apply caller-supplied headers (Set overwrites any defaults).
	for key, header := range headers {
		rqt.Header.Set(key, header)
	}

	var response *http.Response
	response, err = client.Do(rqt)
	if response != nil {
		// Close the body even on error responses.
		defer response.Body.Close()
	}

	// Recovers a panic raised by the statements below.
	defer func() {
		if e := recover(); e != nil {
			fmt.Printf("Panicing %s\r\n", e)
		}
	}()

	if err != nil {
		return []byte(""), err, nil
	}
	Recookies = response.Cookies()
	body, err = ioutil.ReadAll(response.Body)
	return
}
|
|
|
+
|
|
|
+
|
|
|
/*
使用HTTP代理GET访问 功能扩展版(可以设置超时时间、cookies、协议头)
|
|
|
+urlstr 要访问的URL
|
|
|
+prxyurl 代理服务器地址
|
|
|
+outtime 超时时长,单位 秒
|
|
|
+cookies 请求使用的Cookies,
|
|
|
+headers 协议头 如 headers := make(map[string]string)
|
|
|
+
|
|
|
+ headers["Content-Type"] = "application/json;charset=utf-8"
|
|
|
+ headers["token"] = token
|
|
|
+
|
|
|
+返回
|
|
|
+body:接收的数据
|
|
|
+err:错误信息
|
|
|
+*/
|
|
|
// Net_UrlProxyGet_EX performs an HTTP GET on urlstr through the given HTTP
// proxy, with optional request cookies and headers (either may be nil).
// Returns the response body, an error, and the cookies set by the response.
//
// NOTE(review): the parameter name "proxy" shadows the imported
// golang.org/x/net/proxy package inside this function.
func Net_UrlProxyGet_EX(urlstr, proxy string, cookies []*http.Cookie, outtime int, headers map[string]string) (body []byte, err error, Recookies []*http.Cookie) {
	urli := url.URL{}
	var proxylin string
	// Prepend the scheme when the caller passed a bare host:port.
	if strings.Index(proxy, "http://") == -1 {
		proxylin = "http://" + proxy
	} else {
		proxylin = proxy
	}

	// Parse error is ignored here. TODO confirm this is intended.
	urlproxy, _ := urli.Parse(proxylin)
	client := &http.Client{
		Timeout: time.Second * time.Duration(outtime),

		Transport: &http.Transport{
			Proxy: http.ProxyURL(urlproxy),
			// Also bound the wait for response headers specifically.
			ResponseHeaderTimeout: time.Second * time.Duration(outtime),
		},
	}
	var rqt *http.Request
	rqt, err = http.NewRequest("GET", urlstr, nil)
	if err != nil {
		fmt.Println("Get -2 >", err.Error())
		return []byte(""), err, nil
	}

	// Attach caller-supplied cookies to the request.
	if cookies != nil {
		for i := 0; i < len(cookies); i++ {
			rqt.AddCookie(cookies[i])
		}
	}

	// Apply caller-supplied headers (Set overwrites any defaults).
	if headers != nil {
		for key, header := range headers {
			rqt.Header.Set(key, header)
		}
	}

	var response *http.Response
	response, err = client.Do(rqt)
	if response != nil {
		// Close the body even on error responses.
		defer response.Body.Close()
	}

	if err != nil {
		fmt.Println("Get -3 >", err.Error())
		return
	} else {
		Recookies = response.Cookies()

		body, err = ioutil.ReadAll(response.Body)
	}

	// NOTE(review): registered after everything that could panic has run,
	// so this recover is effectively inert.
	defer func() {
		if e := recover(); e != nil {
			fmt.Printf("Panicing %s\r\n", e)
		}

	}()
	return
}
|
|
|
+
|
|
|
+
|
|
|
/*
func Net_CookieStrToCook(str string) (CookieZu []*http.Cookie) {
|
|
|
+ s1 := strings.Split(str, ";")
|
|
|
+ for i,v := range s1 {
|
|
|
+ comma := strings.Index(str, ";")
|
|
|
+ name := str[:comma]
|
|
|
+ value := str[comma+len(";"):]
|
|
|
+ c := &http.Cookie{
|
|
|
+ Name: name,
|
|
|
+ Value: value,
|
|
|
+ Raw: line,
|
|
|
+ }
|
|
|
+ if name != "" {
|
|
|
+ CookieZu
|
|
|
+ }
|
|
|
+ }
|
|
|
+}*/
|
|
|
+
|
|
|
+
|
|
|
// Net_CookieCookToStr flattens a cookie slice into a single
// "raw1;raw2;..." string, skipping cookies whose Raw field is empty.
func Net_CookieCookToStr(CookieZu []*http.Cookie) string {
	var b strings.Builder
	for _, c := range CookieZu {
		if c.Raw == "" {
			continue
		}
		b.WriteString(c.Raw)
		b.WriteString(";")
	}
	return b.String()
}
|
|
|
+
|
|
|
+
|
|
|
/*
 Net_Url_EX
|
|
|
+
|
|
|
+使用HTTP访问(可以设置代理/超时时间/Cookie/请求类型/协议头等) . V 1.0.2 2022.4.8
|
|
|
+methed [选]
|
|
|
+urlstr [必]要访问的URL
|
|
|
+data [选]POST数据
|
|
|
+proxy [选]代理服务器地址 . 为空则不使用代理 。 账密代理格式 {proxyUrl:proxyPort}@{proxyUser}:{proxyPass} 例如 127.0.0.1:8888@user:password; 仅代理格式 {proxyUrl:proxyPort} 例如 127.0.0.1:8888
|
|
|
+cookies [选]传入访问使用的Cookies . 为空则不使用
|
|
|
+outtime [选]超时时长,单位 秒 ,默认6秒
|
|
|
+headers [选]协议头 如 headers := make(map[string]string)
|
|
|
+
|
|
|
+ headers["Content-Type"] = "application/json;charset=utf-8"
|
|
|
+ headers["token"] = token
|
|
|
+ headers["Connection"] = "keep-alive"
|
|
|
+ headers["Accept"] = "*\/*" 去掉\
|
|
|
+ headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36"
|
|
|
+ headers["X-Requested-With"] = "XMLHttpRequest"
|
|
|
+ headers["Referer"] = "http://www.xxxx.com/"
|
|
|
+
|
|
|
+autoredirect [选]是否自动重定向, 1:自动处理 ; 2.不处理 ; 默认为0:自动处理
|
|
|
+返回
|
|
|
+body:接收的数据 , (字符串方式 string(body))
|
|
|
+err:错误信息
|
|
|
+Recookies 返回的Cookies 。 可以配合 Net_CookieCookToStr 使用
|
|
|
+*/
|
|
|
// Net_Url_EX issues an HTTP request with optional proxy, cookies, custom
// headers and redirect control.
//
// methed defaults to "GET" when empty/too short; outtime defaults to 6s
// when < 1. proxy may be "" (direct), "host:port", or
// "host:port@user:pass". cookies is a raw Cookie header value. When
// headers is empty a default Chrome User-Agent is set. autoredirect == 2
// disables automatic redirect following.
//
// Returns the response body, an error, the response Set-Cookie values
// joined as "raw;raw;...", and a clone of the response headers.
func Net_Url_EX(methed, urlstr, data, proxy, cookies string, outtime int, headers map[string]string, autoredirect int) (body []byte, err error, ReCookies string, ReHeader http.Header) {
	if methed == "" || len(methed) < 2 {
		methed = "GET"
	}
	if outtime < 1 {
		outtime = 6
	}

	var client *http.Client
	if proxy == "" {
		client = &http.Client{
			Timeout: time.Second * time.Duration(outtime),
		}
	} else {

		urli := url.URL{}
		var proxyUrl, proxyUser, proxyPass string
		// Prepend the scheme when the caller passed a bare host:port.
		// NOTE(review): Contains(proxy, "http") also matches "http" anywhere
		// in the address, not just a scheme prefix.
		if !strings.Contains(proxy, "http") {
			proxy = fmt.Sprintf("http://%s", proxy)
		}
		// Split "{url}@{user}:{pass}" into address and credentials.
		if strings.Index(proxy, "@") == -1 {
			proxyUrl = proxy
		} else {
			comma := strings.Index(proxy, "@")
			proxyUrl = proxy[:comma]
			lin := proxy[comma+len("@"):]
			if len(lin) > 0 {
				if strings.Index(lin, ":") == -1 {
					proxyUser = lin
					proxyPass = ""
				} else {
					comma = strings.Index(lin, ":")
					proxyUser = lin[:comma]
					proxyPass = lin[comma+len(":"):]
				}
			}
		}
		// Parse error is ignored here. TODO confirm this is intended.
		urlProxy, _ := urli.Parse(proxyUrl)
		if proxyUser != "" && proxyPass != "" {
			urlProxy.User = url.UserPassword(proxyUser, proxyPass)
		}
		client = &http.Client{
			Timeout: time.Second * time.Duration(outtime),
			Transport: &http.Transport{
				Proxy: http.ProxyURL(urlProxy),
			},
		}
	}

	var rqt *http.Request
	rqt, err = http.NewRequest(methed, urlstr, bytes.NewReader([]byte(data)))
	if err != nil {
		return
	}

	// Default User-Agent when the caller supplied no headers at all.
	if len(headers) == 0 {
		rqt.Header.Add("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36")
	} else {
		for key, header := range headers {
			rqt.Header.Set(key, header)
		}
	}

	// Pass the raw cookie string through unmodified.
	if len(cookies) > 1 {
		rqt.Header.Add("Cookie", cookies)
	}

	// autoredirect == 2 stops the client from following redirects and
	// returns the redirect response itself.
	if autoredirect == 2 {
		client.CheckRedirect = func(rqt *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		}
	}

	var response *http.Response
	response, err = client.Do(rqt)
	if response != nil {
		// Close the body even on error responses, logging close failures.
		defer func(Body io.ReadCloser) {
			err := Body.Close()
			if err != nil {
				fmt.Println("client.Do Err > " + err.Error())
			}
		}(response.Body)
	}

	// Recovers a panic raised by the statements below.
	defer func() {
		if e := recover(); e != nil {
			fmt.Printf("Panicing %s\r\n", e)
		}
	}()

	if err != nil {
		return []byte(""), err, "", nil
	}
	// Join the response cookies into a "raw;raw;..." string.
	cooklin := response.Cookies()
	for _, v := range cooklin {
		ReCookies = ReCookies + v.Raw + ";"
	}

	ReHeader = response.Header.Clone()
	body, err = ioutil.ReadAll(response.Body)
	return
}
|
|
|
+
|
|
|
+
|
|
|
// Net_CookieAndUp merges newCookies into oldCookies. Both arguments are
// ";"-separated "name=value" lists. For every old cookie whose name also
// appears in newCookies the new value wins; old cookies without a
// replacement are kept; new cookies that matched nothing are appended.
// The result is ";"-terminated, e.g. "a=1;b=3;".
func Net_CookieAndUp(oldCookies, newCookies string) string {
	oldParts := strings.Split(oldCookies, ";")
	newParts := strings.Split(newCookies, ";")
	lin := ""
	for _, v := range oldParts {
		updated := false
		comma := strings.Index(v, "=")
		for i1, v1 := range newParts {
			if comma != -1 && strings.Index(v1, "=") != -1 {
				comma1 := strings.Index(v1, "=")
				if v[:comma] == v1[:comma1] {
					// Same cookie name: take the new value and mark the
					// new entry as consumed.
					lin = lin + v1 + ";"
					newParts[i1] = ""
					updated = true
					break
				}
			}
		}

		// BUG FIX: the original appended the kept cookie without a ";"
		// separator, fusing it with the next entry ("a=1b=3;").
		if !updated && v != "" {
			lin = lin + v + ";"
		}
	}

	// Append new cookies that did not replace an existing one.
	for _, v := range newParts {
		if v != "" {
			lin = lin + v + ";"
		}
	}

	return lin
}
|
|
|
+
|
|
|
+
|
|
|
// Net_GetDomain extracts the host name from a URL-ish string: the scheme
// ("http://" or "https://") is stripped first, then everything from the
// first "/" (path) and the first ":" (port) onward is dropped.
func Net_GetDomain(url string) (domain string) {
	host := url
	if i := strings.Index(host, "http://"); i != -1 {
		host = host[i+len("http://"):]
	} else if i := strings.Index(host, "https://"); i != -1 {
		host = host[i+len("https://"):]
	}

	// Drop the path, if any.
	if i := strings.Index(host, "/"); i != -1 {
		host = host[:i]
	}

	// Drop the port, if any.
	if i := strings.Index(host, ":"); i != -1 {
		host = host[:i]
	}

	return host
}
|
|
|
+
|
|
|
+
|
|
|
// Net_IPNtoA renders the low 32 bits of ip as a dotted-quad IPv4 string
// (network byte order: the highest byte becomes the first octet).
func Net_IPNtoA(ip int64) string {
	octets := [4]byte{byte(ip >> 24), byte(ip >> 16), byte(ip >> 8), byte(ip)}
	return fmt.Sprintf("%d.%d.%d.%d", octets[0], octets[1], octets[2], octets[3])
}
|
|
|
+
|
|
|
+
|
|
|
// Net_IPAtoN converts a dotted-quad IPv4 string to its integer value.
// An unparsable or non-IPv4 input yields 0 (SetBytes of an empty slice).
func Net_IPAtoN(ip string) int64 {
	v4 := net.ParseIP(ip).To4()
	return big.NewInt(0).SetBytes(v4).Int64()
}
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
// Net_downloadFile downloads url and writes the body to filepath,
// streaming with io.Copy (constant memory). Returns (true, nil) on
// success, (false, err) otherwise.
//
// Fix over the original: a non-200 response is now an error instead of
// silently saving the error page's body as the file.
func Net_downloadFile(url string, filepath string) (bool, error) {
	resp, err := http.Get(url)
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return false, fmt.Errorf("downloading %s: unexpected status %s", url, resp.Status)
	}

	out, err := os.Create(filepath)
	if err != nil {
		return false, err
	}
	defer out.Close()

	if _, err = io.Copy(out, resp.Body); err != nil {
		return false, err
	}

	return true, nil
}
|
|
|
+
|
|
|
+
|
|
|
// Net_GetUrlFileName extracts the file-name portion of a URL: everything
// after the last "/", with any "?query" suffix stripped.
func Net_GetUrlFileName(url string) (fileName string) {
	fileName = url
	// Take the segment after the last slash (the original looped,
	// repeatedly cutting after the first slash — same result).
	if i := strings.LastIndex(fileName, "/"); i != -1 {
		fileName = fileName[i+1:]
	}
	// Drop a trailing query string, if present.
	if q := strings.Index(fileName, "?"); q != -1 {
		fileName = fileName[:q]
	}
	return
}
|
|
|
+
|
|
|
+
|
|
|
/*
 Net_Url_S5_EX
|
|
|
+
|
|
|
+使用Socks5访问(可以设置代理/超时时间/Cookie/请求类型/协议头等) . V 1.0.0 2022.7.14
|
|
|
+methed [选]
|
|
|
+urlstr [必]要访问的URL
|
|
|
+data [选]POST数据
|
|
|
+proxy [选]代理服务器地址 . 为空则不使用代理 。 账密代理格式 {proxyUrl:proxyPort}@{proxyUser}:{proxyPass} 例如 127.0.0.1:8888@user:password; 仅代理格式 {proxyUrl:proxyPort} 例如 127.0.0.1:8888
|
|
|
+cookies [选]传入访问使用的Cookies . 为空则不使用
|
|
|
+outtime [选]超时时长,单位 秒 ,默认6秒
|
|
|
+headers [选]协议头 如 headers := make(map[string]string)
|
|
|
+
|
|
|
+ headers["Content-Type"] = "application/json;charset=utf-8"
|
|
|
+ headers["token"] = token
|
|
|
+ headers["Connection"] = "keep-alive"
|
|
|
+ headers["Accept"] = "*\/*" 去掉\
|
|
|
+ headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36"
|
|
|
+ headers["X-Requested-With"] = "XMLHttpRequest"
|
|
|
+ headers["Referer"] = "http://www.xxxx.com/"
|
|
|
+
|
|
|
+autoredirect [选]是否自动重定向, 1:自动处理 ; 2.不处理 ; 默认为0:自动处理
|
|
|
+返回
|
|
|
+body:接收的数据 , (字符串方式 string(body))
|
|
|
+err:错误信息
|
|
|
+Recookies 返回的Cookies 。 可以配合 Net_CookieCookToStr 使用
|
|
|
+*/
|
|
|
// Net_Url_S5_EX issues an HTTP request through a SOCKS5 proxy, with
// optional cookies, custom headers and redirect control. It mirrors
// Net_Url_EX but defaults the proxy scheme to "socks5://".
//
// methed defaults to "GET" when empty/too short; outtime defaults to 6s
// when < 1. proxy may be "" (direct), "host:port", or
// "host:port@user:pass". cookies is a raw Cookie header value. When
// headers is empty a default Chrome User-Agent is set. autoredirect == 2
// disables automatic redirect following.
//
// Returns the response body, an error, the response Set-Cookie values
// joined as "raw;raw;...", and a clone of the response headers.
func Net_Url_S5_EX(methed, urlstr, data, proxy, cookies string, outtime int, headers map[string]string, autoredirect int) (body []byte, err error, ReCookies string, ReHeader http.Header) {
	if methed == "" || len(methed) < 2 {
		methed = "GET"
	}
	if outtime < 1 {
		outtime = 6
	}

	var client *http.Client
	if proxy == "" {
		client = &http.Client{
			Timeout: time.Second * time.Duration(outtime),
		}
	} else {

		urli := url.URL{}
		var proxyUrl, proxyUser, proxyPass string
		// Prepend the scheme when the caller passed a bare host:port.
		if !strings.Contains(proxy, "socks5") {
			proxy = fmt.Sprintf("socks5://%s", proxy)
		}
		// Split "{url}@{user}:{pass}" into address and credentials.
		if strings.Index(proxy, "@") == -1 {
			proxyUrl = proxy
		} else {
			comma := strings.Index(proxy, "@")
			proxyUrl = proxy[:comma]
			lin := proxy[comma+len("@"):]
			if len(lin) > 0 {
				if strings.Index(lin, ":") == -1 {
					proxyUser = lin
					proxyPass = ""
				} else {
					comma = strings.Index(lin, ":")
					proxyUser = lin[:comma]
					proxyPass = lin[comma+len(":"):]
				}
			}
		}
		// Parse error is ignored here. TODO confirm this is intended.
		urlProxy, _ := urli.Parse(proxyUrl)
		if proxyUser != "" && proxyPass != "" {
			urlProxy.User = url.UserPassword(proxyUser, proxyPass)
		}
		// http.Transport understands socks5:// proxy URLs natively.
		client = &http.Client{
			Timeout: time.Second * time.Duration(outtime),
			Transport: &http.Transport{
				Proxy: http.ProxyURL(urlProxy),
			},
		}
	}

	var rqt *http.Request
	rqt, err = http.NewRequest(methed, urlstr, bytes.NewReader([]byte(data)))
	if err != nil {
		return
	}

	// Default User-Agent when the caller supplied no headers at all.
	if len(headers) == 0 {
		rqt.Header.Add("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4664.110 Safari/537.36")
	} else {
		for key, header := range headers {
			rqt.Header.Set(key, header)
		}
	}

	// Pass the raw cookie string through unmodified.
	if len(cookies) > 1 {
		rqt.Header.Add("Cookie", cookies)
	}

	// autoredirect == 2 stops the client from following redirects and
	// returns the redirect response itself.
	if autoredirect == 2 {
		client.CheckRedirect = func(rqt *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		}
	}

	var response *http.Response
	response, err = client.Do(rqt)
	if response != nil {
		// Close the body even on error responses, logging close failures.
		defer func(Body io.ReadCloser) {
			err := Body.Close()
			if err != nil {
				fmt.Println("client.Do Err > " + err.Error())
			}
		}(response.Body)
	}

	// Recovers a panic raised by the statements below.
	defer func() {
		if e := recover(); e != nil {
			fmt.Printf("Panicing %s\r\n", e)
		}
	}()

	if err != nil {
		return []byte(""), err, "", nil
	}
	// Join the response cookies into a "raw;raw;..." string.
	cooklin := response.Cookies()
	for _, v := range cooklin {
		ReCookies = ReCookies + v.Raw + ";"
	}

	ReHeader = response.Header.Clone()
	body, err = ioutil.ReadAll(response.Body)
	return
}
|