commit 60c7d62824a777fe876a953a19339df17f2ca113 Author: huangsimin Date: Thu Dec 19 11:51:56 2019 +0800 v1.0.0 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..294be7a --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +*.pyc +*.vscode diff --git a/base.go b/base.go new file mode 100644 index 0000000..a49e3ab --- /dev/null +++ b/base.go @@ -0,0 +1,51 @@ +package requests + +import ( + "bytes" + "errors" + "net/http" + "reflect" +) + +func buildBodyRequest(wf *Workflow) *http.Request { + var req *http.Request + var err error + contentType := "" + + if wf.Body.GetIOBody() == nil { + req, err = http.NewRequest(wf.Method, wf.GetRawURL(), nil) + } else { + var bodybuf *bytes.Buffer + switch wf.Body.GetIOBody().(type) { + case []byte: + bodybuf = bytes.NewBuffer(wf.Body.GetIOBody().([]byte)) + case string: + bodybuf = bytes.NewBuffer([]byte(wf.Body.GetIOBody().(string))) + case *bytes.Buffer: + bodybuf = bytes.NewBuffer(wf.Body.GetIOBody().(*bytes.Buffer).Bytes()) + default: + panic(errors.New("the type is not exist, type is " + reflect.TypeOf(wf.Body.GetIOBody()).String())) + } + req, err = http.NewRequest(wf.Method, wf.GetRawURL(), bodybuf) + } + + if err != nil { + panic(err) + } + + if wf.Body.ContentType() != "" { + contentType = wf.Body.ContentType() + } else { + contentType = "" + if wf.Method == "POST" || wf.Method == "PUT" || wf.Method == "PATCH" { + contentType = TypeURLENCODED + } + } + + if contentType != "" { + req.Header.Set(HeaderKeyContentType, contentType) + } + + return req + +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..a032110 --- /dev/null +++ b/go.mod @@ -0,0 +1,13 @@ +module requests + +go 1.13 + +require ( + github.com/474420502/gjson v1.1.3 + github.com/elazarl/goproxy v0.0.0-20190711103511-473e67f1d7d2 + github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2 // indirect + github.com/tidwall/match v1.0.1 // indirect + golang.org/x/net v0.0.0-00010101000000-000000000000 +) + +replace golang.org/x/net => github.com/golang/net v0.0.0-20190902185345-cdfb69ac37fc6fa907650654115ebebb3aae2087 diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..28490c7 --- /dev/null +++ b/go.sum @@ -0,0 +1,11 @@ +github.com/474420502/gjson v1.1.3 h1:rQxKNSFS8bM5iVVKKb9EHY1SS2k+EhzVNUXe2xSGn8o= +github.com/474420502/gjson v1.1.3/go.mod h1:mdAOevjPYIFWOE8CpejPHwoJCz96oNnuwhjhrAVeKaY= +github.com/elazarl/goproxy v0.0.0-20190711103511-473e67f1d7d2 h1:aZtFdDNWY/yH86JPR2WX/PN63635VsE/f/nXNPAbYxY= +github.com/elazarl/goproxy v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= +github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2 h1:dWB6v3RcOy03t/bUadywsbyrQwCqZeNIEX6M1OtSZOM= +github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8= +github.com/golang/net v0.0.0-20190902185345-cdfb69ac37fc6fa907650654115ebebb3aae2087 h1:haK1T12C0CO79KUdu+ZzLL9+l9BwM9PRkd2/mQqdg8E= +github.com/golang/net v0.0.0-20190902185345-cdfb69ac37fc6fa907650654115ebebb3aae2087/go.mod h1:98y8FxUyMjTdJ5eOj/8vzuiVO14/dkJ98NYhEPG8QGY= +github.com/rogpeppe/go-charset v0.0.0-20180617210344-2471d30d28b4/go.mod h1:qgYeAmZ5ZIpBWTGllZSQnw97Dj+woV0toclVaRGI8pc= +github.com/tidwall/match v1.0.1 h1:PnKP62LPNxHKTwvHHZZzdOAOCtsJTjo6dZLCwpKm5xc= +github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E= diff --git a/init_test.go b/init_test.go new file mode 100644 index 0000000..d51eec8 --- /dev/null +++ b/init_test.go @@ -0,0 +1,29 @@ 
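+// init_test.go (added comment for clarity): starts a local goproxy instance on
+// localhost:58080 and checks for a running httpbin container (or a hosts entry
+// for httpbin.org), which the integration tests below rely on.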
+package requests + +import ( + "log" + "net/http" + "os/exec" + "time" + + "github.com/elazarl/goproxy" +) + +const ProxyAddress = "localhost:58080" + +func init() { + log.SetFlags(log.Lshortfile | log.LstdFlags) + go func() { + proxy := goproxy.NewProxyHttpServer() + proxy.Verbose = true + http.ListenAndServe(ProxyAddress, proxy) + }() + + cmd := exec.Command("/bin/bash", "-c", "docker ps | grep httpbin") + _, err := cmd.Output() + if err != nil { + log.Println("recommend 1. docker run -p 80:80 kennethreitz/httpbin \n2. echo \"127.0.0.1 httpbin.org\" >> /etc/hosts") + } + + time.Sleep(time.Millisecond * 100) +} diff --git a/multipart.go b/multipart.go new file mode 100644 index 0000000..5da1a64 --- /dev/null +++ b/multipart.go @@ -0,0 +1,110 @@ +package requests + +import ( + "bytes" + "io" + "log" + "mime/multipart" + "net/url" + "strconv" +) + +func writeFormUploadFile(mwriter *multipart.Writer, ufile *UploadFile) { + part, err := mwriter.CreateFormFile(ufile.FieldName, ufile.FileName) + if err != nil { + log.Panic(err) + } + io.Copy(part, ufile.FileReaderCloser) + + err = ufile.FileReaderCloser.Close() + if err != nil { + panic(err) + } +} + +func createMultipart(postParams IBody, params []interface{}) { + plen := len(params) + + body := &bytes.Buffer{} + mwriter := multipart.NewWriter(body) + + for _, iparam := range params[0 : plen-1] { + switch param := iparam.(type) { + case *UploadFile: + if param.FieldName == "" { + param.FieldName = "file0" + } + writeFormUploadFile(mwriter, param) + case UploadFile: + if param.FieldName == "" { + param.FieldName = "file0" + } + writeFormUploadFile(mwriter, ¶m) + case []*UploadFile: + for i, p := range param { + if p.FieldName == "" { + p.FieldName = "file" + strconv.Itoa(i) + } + writeFormUploadFile(mwriter, p) + } + case []UploadFile: + for i, p := range param { + if p.FieldName == "" { + p.FieldName = "file" + strconv.Itoa(i) + } + writeFormUploadFile(mwriter, &p) + } + case string: + uploadFiles, err := UploadFileFromGlob(param) + if err != nil { + log.Println(err) + } else { + for i, p := range uploadFiles { + if p.FieldName == "" { + p.FieldName = "file" + strconv.Itoa(i) + } + writeFormUploadFile(mwriter, p) + } + } + + case []string: + for i, glob := range param { + uploadFiles, err := UploadFileFromGlob(glob) + if err != nil { + log.Println(err) + } else { + for ii, p := range uploadFiles { + if p.FieldName == "" { + p.FieldName = "file" + strconv.Itoa(i) + "_" + strconv.Itoa(ii) + } + writeFormUploadFile(mwriter, p) + } + } + } + case map[string]string: + for k, v := range param { + mwriter.WriteField(k, v) + } + case map[string][]string: + for k, vs := range param { + for _, v := range vs { + mwriter.WriteField(k, v) + } + } + case url.Values: + for k, vs := range param { + for _, v := range vs { + mwriter.WriteField(k, v) + } + } + } + } + + postParams.AddContentType(mwriter.FormDataContentType()) + postParams.SetIOBody(body) + + err := mwriter.Close() + if err != nil { + panic(err) + } +} diff --git a/response.go b/response.go new file mode 100644 index 0000000..fcc9363 --- /dev/null +++ b/response.go @@ -0,0 +1,86 @@ +package requests + +import ( + "bytes" + "compress/gzip" + "compress/zlib" + "io" + "io/ioutil" + "net/http" +) + +// Response 响应内容包含http.Response 已读 +type Response struct { + readContent string + readResponse *http.Response +} + +// FromHTTPResponse 生成Response 从标准http.Response +func FromHTTPResponse(resp *http.Response, isDecompressNoAccept bool) (*Response, error) { + var err error + // 复制response 返回内容 并且测试是否有解压的需求 
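+	// Added note: the whole body is read into memory here; when isDecompressNoAccept
+	// is true, the raw bytes are probed with gzip and then zlib below regardless of
+	// the response headers (for servers that compress without sending Content-Encoding).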
+ srcbuf, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, err + } + resp.Body.Close() + + content := "" + + if isDecompressNoAccept { // 在某个已经遗忘的网页测试过, 为了兼容 Python requests + srcReader := bytes.NewReader(srcbuf) + var reader io.ReadCloser + if reader, err = gzip.NewReader(srcReader); err == nil { + defer reader.Close() + buf, err := ioutil.ReadAll(reader) + if err != nil { + return nil, err + } + content = string(buf) + } else if reader, err = zlib.NewReader(srcReader); err == nil { + defer reader.Close() + buf, err := ioutil.ReadAll(reader) + if err != nil { + return nil, err + } + content = string(buf) + } else { + content = string(srcbuf) + } + + } else { + content = string(srcbuf) + } + + return &Response{readContent: content, readResponse: resp}, nil +} + +// Content 返回解压后的内容 +func (gresp *Response) Content() string { + return gresp.readContent +} + +// GetSrcResponse 获取原生golang http.Response +func (gresp *Response) GetSrcResponse() *http.Response { + return gresp.readResponse +} + +// GetStatue 获取Statue String +func (gresp *Response) GetStatue() string { + return gresp.readResponse.Status +} + +// GetStatueCode 获取Statue int +func (gresp *Response) GetStatueCode() int { + return gresp.readResponse.StatusCode +} + +// GetHeader Header map[string][]string +func (gresp *Response) GetHeader() http.Header { + return gresp.readResponse.Header +} + +// GetContentLength 获取Content的内容长度, 如果存在 IsDecompressNoAccept 可能是压缩级别的长度, 非GetContent长度 +func (gresp *Response) GetContentLength() int64 { + return gresp.readResponse.ContentLength +} diff --git a/response_test.go b/response_test.go new file mode 100644 index 0000000..b2524d4 --- /dev/null +++ b/response_test.go @@ -0,0 +1,62 @@ +package requests + +import ( + "net/http" + "testing" + + "github.com/474420502/gjson" +) + +func TestFromHTTPResponse(t *testing.T) { + + var gresp *http.Response + var err error + gresp, err = http.DefaultClient.Get("http://httpbin.org/get") + if err != nil { + t.Error(err) + } + resp, err := FromHTTPResponse(gresp, false) + if err != nil { + t.Error(err) + } + + if gjson.Get(resp.Content(), "headers.Host").String() != "httpbin.org" { + t.Error("headers.Host != httpbin.org ?") + } + + if resp.GetSrcResponse().StatusCode != 200 { + t.Error("StatusCode != 200") + } + + if len(resp.GetSrcResponse().Header) == 0 { + t.Error("esp.GetSrcResponse().Header == nil") + } + + if resp.GetStatue() != "200 OK" || resp.GetStatueCode() != 200 { + t.Error(" resp.GetStatue() != 200 OK") + } + + if len(resp.GetHeader()["Content-Length"]) != 1 { + t.Error("resp.GetHeader() is error ?") + } + + if int64(len(resp.Content())) != resp.GetContentLength() { + t.Error("content len is not equal") + } +} + +func TestResponseDeflate(t *testing.T) { + ses := NewSession() + if wf := ses.Get("http://httpbin.org/get"); wf != nil { + wf.AddHeader("accept-encoding", "deflate") + resp, err := wf.Execute() + if err != nil { + t.Error(err) + } else { + if gjson.Get(resp.Content(), "headers.Accept-Encoding").String() != "deflate" { + t.Error("Accept-Encoding != deflate ?") + } + } + } + +} diff --git a/session.go b/session.go new file mode 100644 index 0000000..1d28ee4 --- /dev/null +++ b/session.go @@ -0,0 +1,394 @@ +package requests + +import ( + "crypto/tls" + "errors" + "net/http" + "net/http/cookiejar" + "net/url" + "reflect" + "runtime" + "strings" + "time" + + "golang.org/x/net/publicsuffix" +) + +// Body 相关参数结构 +type Body struct { + // Query map[string][]string + ioBody interface{} + // prefix ContentType 前缀 + prefix string + // 
Files []UploadFile + contentTypes map[string]int +} + +// NewBody new body pointer +func NewBody() *Body { + b := &Body{} + b.contentTypes = make(map[string]int) + return b +} + +// SetIOBody 设置IOBody的值 +func (body *Body) SetIOBody(iobody interface{}) { + body.ioBody = iobody +} + +// GetIOBody 获取ioBody值 +func (body *Body) GetIOBody() interface{} { + return body.ioBody +} + +// ContentType 获取ContentType +func (body *Body) ContentType() string { + content := body.prefix + for kvalue := range body.contentTypes { + content += kvalue + ";" + } + return strings.TrimRight(content, ";") +} + +// SetPrefix SetPrefix 和 AddContentType的顺序会影响到ContentType()的返回结果 +func (body *Body) SetPrefix(ct string) { + body.prefix = strings.TrimRight(ct, ";") + ";" +} + +// AddContentType 添加 Add Type类型 +func (body *Body) AddContentType(ct string) { + for _, v := range strings.Split(ct, ";") { + v = strings.Trim(v, " ") + if v != "" { + if body.prefix != v { + body.contentTypes[v] = 1 + } + } + } + +} + +// IBody 相关参数结构 +type IBody interface { + // GetIOBody 获取iobody data + GetIOBody() interface{} + // SetIOBody 设置iobody data + SetIOBody(iobody interface{}) + // ContentType 返回包括 Prefix 所有的ContentType + ContentType() string + // AppendContent + AddContentType(ct string) + // SetPrefix 设置 Prefix; 唯一前缀; 就是ContentType的第一个, ContentType(Prefix);ContentType;ContentType + SetPrefix(ct string) +} + +// BasicAuth 帐号认真结构 +type BasicAuth struct { + // User 帐号 + User string + // Password 密码 + Password string +} + +// IsSetting 是否设置的一些情景 +type IsSetting struct { + isDecompressNoAccept bool +} + +// Session 的基本方法 +type Session struct { + auth *BasicAuth + + body IBody + + client *http.Client + cookiejar http.CookieJar + + transport *http.Transport + + Header http.Header + Query url.Values + + Is IsSetting +} + +const ( + // TypeJSON 类型 + TypeJSON = "application/json" + + // TypeXML 类型 + TypeXML = "text/xml" + + // TypePlain 类型 + TypePlain = "text/plain" + + // TypeHTML 类型 + TypeHTML = "text/html" + + // TypeURLENCODED 类型 + TypeURLENCODED = "application/x-www-form-urlencoded" + + // TypeForm PostForm类型 + TypeForm = TypeURLENCODED + + // TypeStream application/octet-stream 只能提交一个二进制流, 很少用 + TypeStream = "application/octet-stream" + + // TypeFormData 类型 Upload File 支持path(string) 自动转换成UploadFile + TypeFormData = "multipart/form-data" + + // TypeMixed Mixed类型 + TypeMixed = "multipart/mixed" + + // HeaderKeyHost Host + HeaderKeyHost = "Host" + + // HeaderKeyUA User-Agent + HeaderKeyUA = "User-Agent" + + // HeaderKeyContentType Content-Type + HeaderKeyContentType = "Content-Type" +) + +// TypeConfig 配置类型 +type TypeConfig int + +const ( + _ TypeConfig = iota + // CRequestTimeout request 包括 dial request redirect 总时间超时 + CRequestTimeout // 支持time.Duration 和 int(秒为单位) + + // CDialTimeout 一个Connect过程的Timeout + CDialTimeout // 支持time.Duration 和 int(秒为单位) + + // CKeepAlives 默认KeepAlives false, 如果默认为true容易被一直KeepAlives, 没关闭链接 + CKeepAlives + + // CProxy 代理链接 + CProxy // http, https, socks5 + + // CInsecure InsecureSkipVerify + CInsecure // true, false + + // CBasicAuth 帐号认证 + CBasicAuth // user pwd + + // CTLS 帐号认证 + CTLS // user pwd + + // CIsWithCookiejar 持久化 CookieJar true or false ; default = true + CIsWithCookiejar + + // CIsDecompressNoAccept 解压 当response header 不存在 Accept-Encoding + // 很多特殊情景会不返回Accept-Encoding: Gzip. 
如 不按照标准的网站 + CIsDecompressNoAccept +) + +// NewSession 创建Session +func NewSession() *Session { + client := &http.Client{} + transport := &http.Transport{DisableCompression: true, DisableKeepAlives: true} + + EnsureTransporterFinalized(transport) + + client.Transport = transport + cjar, err := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List}) + if err != nil { + panic(err) + } + + client.Jar = cjar + return &Session{client: client, body: NewBody(), transport: transport, auth: nil, cookiejar: client.Jar, Header: make(http.Header), Is: IsSetting{false}} +} + +// SetConfig 设置配置 +func (ses *Session) SetConfig(typeConfig TypeConfig, values interface{}) { + + switch typeConfig { + case CRequestTimeout: + switch v := values.(type) { + case time.Duration: + ses.client.Timeout = v + case int: + ses.client.Timeout = time.Duration(v * int(time.Second)) + case int64: + ses.client.Timeout = time.Duration(v * int64(time.Second)) + case float32: + ses.client.Timeout = time.Duration(v * float32(time.Second)) + case float64: + ses.client.Timeout = time.Duration(v * float64(time.Second)) + default: + panic(errors.New("error type " + reflect.TypeOf(v).String())) + } + case CDialTimeout: + // TODO: CDialTimeout CRequestTimeout 与 细节 + case CIsDecompressNoAccept: + ses.Is.isDecompressNoAccept = values.(bool) + case CKeepAlives: + // println(ses.transport.DisableKeepAlives) + ses.transport.DisableKeepAlives = !values.(bool) + case CIsWithCookiejar: + v := values.(bool) + if v { + if ses.client.Jar == nil { + ses.client.Jar = ses.cookiejar + } + } else { + ses.client.Jar = nil + } + case CProxy: + switch v := values.(type) { + case string: + purl, err := (url.Parse(v)) + if err != nil { + panic(err) + } + ses.transport.Proxy = http.ProxyURL(purl) + case *url.URL: + ses.transport.Proxy = http.ProxyURL(v) + case nil: + ses.transport.Proxy = nil + } + case CInsecure: + ses.transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: values.(bool)} + case CTLS: + ses.transport.TLSClientConfig = values.(*tls.Config) + case CBasicAuth: + if ses.auth == nil { + ses.auth = &BasicAuth{} + } + + switch v := values.(type) { + case *BasicAuth: + ses.auth.User = v.User + ses.auth.Password = v.Password + case BasicAuth: + ses.auth.User = v.User + ses.auth.Password = v.Password + case []string: + ses.auth.User = v[0] + ses.auth.Password = v[1] + case nil: + ses.auth = nil + } + default: + panic(errors.New("unknown typeConfig " + reflect.TypeOf(typeConfig).String())) + } + return +} + +// SetQuery 设置url query的持久参数的值 +func (ses *Session) SetQuery(values url.Values) { + ses.Query = values +} + +// GetQuery 获取get query的值 +func (ses *Session) GetQuery() url.Values { + return ses.Query +} + +// SetHeader 设置set Header的值, 必须符合规范 HaHa -> Haha 如果真要HaHa,只能这样 Ha-Ha +func (ses *Session) SetHeader(header http.Header) { + ses.Header = header +} + +// GetHeader 获取get Header的值 +func (ses *Session) GetHeader() http.Header { + return ses.Header +} + +// SetCookies 设置Cookies 或者添加Cookies Del +func (ses *Session) SetCookies(u *url.URL, cookies []*http.Cookie) { + ses.cookiejar.SetCookies(u, cookies) +} + +// GetCookies 返回 Cookies +func (ses *Session) GetCookies(u *url.URL) []*http.Cookie { + return ses.cookiejar.Cookies(u) +} + +// DelCookies 删除 Cookies +func (ses *Session) DelCookies(u *url.URL, name string) { + cookies := ses.cookiejar.Cookies(u) + for _, c := range cookies { + if c.Name == name { + c.MaxAge = -1 + break + } + } + ses.SetCookies(u, cookies) +} + +// ClearCookies 清楚所有cookiejar上的cookies +func (ses *Session) 
ClearCookies() { + cjar, err := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List}) + if err != nil { + panic(err) + } + ses.cookiejar = cjar + ses.client.Jar = ses.cookiejar +} + +// Head 请求 +func (ses *Session) Head(url string) *Workflow { + wf := NewWorkflow(ses, url) + wf.Method = "HEAD" + return wf +} + +// Get 请求 +func (ses *Session) Get(url string) *Workflow { + wf := NewWorkflow(ses, url) + wf.Method = "GET" + return wf +} + +// Post 请求 +func (ses *Session) Post(url string) *Workflow { + wf := NewWorkflow(ses, url) + wf.Method = "POST" + return wf +} + +// Put 请求 +func (ses *Session) Put(url string) *Workflow { + wf := NewWorkflow(ses, url) + wf.Method = "PUT" + return wf +} + +// Patch 请求 +func (ses *Session) Patch(url string) *Workflow { + wf := NewWorkflow(ses, url) + wf.Method = "PATCH" + return wf +} + +// Options 请求 +func (ses *Session) Options(url string) *Workflow { + wf := NewWorkflow(ses, url) + wf.Method = "OPTIONS" + return wf +} + +// Delete 请求 +func (ses *Session) Delete(url string) *Workflow { + wf := NewWorkflow(ses, url) + wf.Method = "DELETE" + return wf +} + +// // CloseIdleConnections closes the idle connections that a session client may make use of +// // 从levigross/grequests 借鉴 +// func (ses *Session) CloseIdleConnections() { +// ses.client.Transport.(*http.Transport).CloseIdleConnections() +// } + +// EnsureTransporterFinalized will ensure that when the HTTP client is GCed +// the runtime will close the idle connections (so that they won't leak) +// this function was adopted from Hashicorp's go-cleanhttp package +func EnsureTransporterFinalized(httpTransport *http.Transport) { + runtime.SetFinalizer(&httpTransport, func(transportInt **http.Transport) { + (*transportInt).CloseIdleConnections() + }) +} diff --git a/session_test.go b/session_test.go new file mode 100644 index 0000000..440c91a --- /dev/null +++ b/session_test.go @@ -0,0 +1,545 @@ +package requests + +import ( + "net/http" + "net/url" + "regexp" + "strings" + "testing" + "time" + + "github.com/474420502/gjson" +) + +func TestNewSession(t *testing.T) { + ses := NewSession() + if ses == nil { + t.Error("session create fail, value is nil") + } +} + +func TestSession_Get(t *testing.T) { + type fields struct { + client *http.Client + } + type args struct { + url string + } + tests := []struct { + name string + fields fields + args args + }{ + { + name: "Get test", + fields: fields{client: &http.Client{}}, + args: args{url: "http://httpbin.org/get"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ses := &Session{ + client: tt.fields.client, + } + resp, err := ses.Get(tt.args.url).Execute() + if err != nil { + t.Error(err) + } + if len(resp.Content()) <= 150 { + t.Error(resp.Content()) + } + }) + } +} + +func TestSession_Post(t *testing.T) { + type args struct { + params []interface{} + } + + tests := []struct { + name string + args args + want *regexp.Regexp + }{ + { + name: "Post test", + args: args{params: nil}, + want: regexp.MustCompile(`"form": \{\}`), + }, + { + name: "Post data", + args: args{params: []interface{}{[]byte("a=1&b=2")}}, + want: regexp.MustCompile(`"form": \{[^"]+"a": "1"[^"]+"b": "2"[^\}]+\}`), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ses := NewSession() + got, err := ses.Post("http://httpbin.org/post").SetBodyAuto(tt.args.params...).Execute() + + if err != nil { + t.Errorf("Metchod error = %v", err) + return + } + + if tt.want.MatchString(got.readContent) == false { + t.Errorf("Metchod = %v, 
want %v", got, tt.want) + } + + }) + } +} + +func TestSession_Setparams(t *testing.T) { + type fields struct { + client *http.Client + params *Body + } + type args struct { + params []interface{} + } + tests := []struct { + name string + fields fields + args args + want *regexp.Regexp + wantErr bool + }{ + { + name: "test Setparams", + args: args{params: []interface{}{map[string]string{"a": "1", "b": "2"}}}, + want: regexp.MustCompile(`"form": \{[^"]+"a": "1"[^"]+"b": "2"[^\}]+\}`), + }, + { + name: "test json", + args: args{params: []interface{}{`{"a":"1","b":"2"}`, TypeJSON}}, + want: regexp.MustCompile(`"json": \{[^"]+"a": "1"[^"]+"b": "2"[^\}]+\}`), + }, + { + name: "test xml", + fields: fields{client: &http.Client{}, params: NewBody()}, + args: args{params: []interface{}{`test`, TypeXML}}, + want: regexp.MustCompile(`"data": "test"`), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ses := NewSession() + + got, err := ses.Post("http://httpbin.org/post").SetBodyAuto(tt.args.params...).Execute() + if (err != nil) != tt.wantErr { + t.Errorf("Metchod error = %v, wantErr %v", err, tt.wantErr) + return + } + + if tt.want.MatchString(got.readContent) == false { + t.Errorf("Metchod = %v, want %v", got, tt.want) + } + }) + } +} + +func TestSession_PostUploadFile(t *testing.T) { + type args struct { + params interface{} + } + + tests := []struct { + name string + args args + want *regexp.Regexp + }{ + { + name: "test post uploadfile glob", + args: args{params: "tests/*.js"}, + want: regexp.MustCompile(`"file0": "data:application/octet-stream;base64`), + }, + { + name: "test post uploadfile only one file", + args: args{params: "tests/json.file"}, + want: regexp.MustCompile(`"file0": "json.file.+jsonjsonjsonjson"`), + }, + { + name: "test post uploadfile key values", + args: args{params: map[string]string{"a": "32"}}, + want: regexp.MustCompile(`"a": "32"`), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ses := NewSession() + got, err := ses.Post("http://httpbin.org/post").SetBodyAuto(tt.args.params, TypeFormData).Execute() + + if err != nil { + t.Errorf("Metchod error = %v", err) + return + } + + if tt.want.MatchString(got.readContent) == false { + t.Errorf("Metchod = %v, want %v", got, tt.want) + } + + }) + } +} + +func TestSession_Put(t *testing.T) { + type args struct { + params interface{} + } + + tests := []struct { + name string + args args + want *regexp.Regexp + }{ + { + name: "test post uploadfile glob", + args: args{params: "tests/*.js"}, + want: regexp.MustCompile(`"file0": "data:application/octet-stream;base64`), + }, + { + name: "test post uploadfile only one file", + args: args{params: "tests/json.file"}, + want: regexp.MustCompile(`"file0": "json.file.+jsonjsonjsonjson"`), + }, + { + name: "test post uploadfile key values", + args: args{params: map[string]string{"a": "32"}}, + want: regexp.MustCompile(`"a": "32"`), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ses := NewSession() + got, err := ses.Put("http://httpbin.org/put").SetBodyAuto(tt.args.params, TypeFormData).Execute() + + if err != nil { + t.Errorf("Metchod error = %v", err) + return + } + + if tt.want.MatchString(got.readContent) == false { + t.Errorf("Metchod = %v, want %v", got, tt.want) + } + + }) + } +} + +func TestSession_Patch(t *testing.T) { + type args struct { + params interface{} + } + + tests := []struct { + name string + args args + want *regexp.Regexp + }{ + { + name: "test post uploadfile glob", + args: args{params: 
"tests/*.js"}, + want: regexp.MustCompile(`"file0": "data:application/octet-stream;base64`), + }, + { + name: "test post uploadfile only one file", + args: args{params: "tests/json.file"}, + want: regexp.MustCompile(`"file0": "json.file.+jsonjsonjsonjson"`), + }, + { + name: "test post uploadfile key values", + args: args{params: map[string]string{"a": "32"}}, + want: regexp.MustCompile(`"a": "32"`), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ses := NewSession() + got, err := ses.Patch("http://httpbin.org/patch").SetBodyAuto(tt.args.params, TypeFormData).Execute() + + if err != nil { + t.Errorf("Metchod error = %v", err) + return + } + + if tt.want.MatchString(got.readContent) == false { + t.Errorf("Metchod = %v, want %v", got, tt.want) + } + + }) + } +} + +func TestSession_SetConfig(t *testing.T) { + + type args struct { + typeConfig TypeConfig + values interface{} + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "test timeout", + args: args{typeConfig: CRequestTimeout, values: 0.0001}, + wantErr: true, + }, + + { + name: "test not timeout", + args: args{typeConfig: CRequestTimeout, values: 5}, + wantErr: false, + }, + + { + name: "test proxy", + args: args{typeConfig: CProxy, values: "http://" + ProxyAddress}, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ses := NewSession() + ses.SetConfig(tt.args.typeConfig, tt.args.values) + _, err := ses.Get("http://httpbin.org/get").Execute() + + if (err != nil) != tt.wantErr { + t.Errorf("Metchod error = %v", err) + return + } + + }) + } +} + +func TestSession_SetConfigInsecure(t *testing.T) { + + ses := NewSession() + ses.SetConfig(CInsecure, true) + + for _, badSSL := range []string{ + "https://self-signed.badssl.com/", + "https://expired.badssl.com/", + "https://wrong.host.badssl.com/", + } { + resp, err := ses.Get(badSSL).Execute() + if err != nil { + t.Error("Unable to make request", err) + } + if resp.readResponse.StatusCode != 200 { + t.Error("Request did not return OK, is ", resp.readResponse.StatusCode) + } + } + +} + +func TestSession_Cookies(t *testing.T) { + ses := NewSession() + + t.Run("set cookie", func(t *testing.T) { + resp, err := ses.Get("http://httpbin.org/cookies/set").AddKVCookie("a", "1").Execute() + if err != nil { + t.Error("cookies set error", err) + } + + if !regexp.MustCompile(`"a": "1"`).MatchString(resp.readContent) { + t.Error(resp.readContent) + } + }) +} + +func TestSession_Header(t *testing.T) { + chromeua := "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36" + ses := NewSession() + + t.Run("ua header test", func(t *testing.T) { + + ses.Header.Add(HeaderKeyUA, chromeua) + resp, err := ses.Get("https://www.baidu.com").Execute() + if err != nil { + t.Error("cookies set error", err) + } + + if len(resp.readContent) <= 5000 { + t.Error(resp.readContent, len(resp.readContent)) + } + + ses = NewSession() + resp, err = ses.Get("https://www.baidu.com").AddHeader(HeaderKeyUA, chromeua).Execute() + if err != nil { + t.Error("cookies set error", err) + } + + if len(resp.readContent) <= 5000 { + t.Error(resp.readContent, len(resp.readContent)) + } + }) +} + +func TestSession_ConfigEx(t *testing.T) { + ses := NewSession() + ses.SetConfig(CRequestTimeout, time.Microsecond) + resp, err := ses.Get("http://httpbin.org/get").Execute() + if err == nil { + t.Error(resp) + } else { + if strings.LastIndex(err.Error(), "Client.Timeout exceeded while awaiting 
headers") < 0 { + t.Error(err) + } + } + + ses.SetConfig(CRequestTimeout, float32(0.0000001)) + resp, err = ses.Get("http://httpbin.org/get").Execute() + if err == nil { + t.Error(resp) + } else { + if strings.LastIndex(err.Error(), "Client.Timeout exceeded while awaiting headers") < 0 { + t.Error(err) + } + } + + ses.SetConfig(CKeepAlives, true) + ses.SetConfig(CRequestTimeout, int64(5)) + // jar, _ := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List}) + u, err := url.Parse("http://httpbin.org") + if err != nil { + t.Error(err) + } else { + // jar.SetCookies(u, []*http.Cookie{&http.Cookie{Name: "Request", Value: "Cookiejar"}}) + ses.SetConfig(CIsWithCookiejar, false) + ses.SetConfig(CIsWithCookiejar, true) + ses.SetCookies(u, []*http.Cookie{&http.Cookie{Name: "Request", Value: "Cookiejar"}, &http.Cookie{Name: "eson", Value: "bad"}}) + resp, err = ses.Get("http://httpbin.org/get").Execute() + if err != nil { + t.Error(err) + } + + if gjson.Get(resp.Content(), "headers.Cookie").String() != "Request=Cookiejar; eson=bad" { + t.Error(resp.Content()) + } + + if resp.GetSrcResponse().Header["Connection"][0] != "keep-alive" { + t.Error("CKeepAlive is error") + } + } + + ses.SetConfig(CProxy, nil) + if u, err := url.Parse("http://" + ProxyAddress); err != nil { + t.Error(err) + } else { + ses.SetConfig(CProxy, u) + } + + resp, err = ses.Get("http://httpbin.org/get").Execute() + if err != nil { + t.Error(err) + } + + ses.DelCookies(u, "eson") + resp, err = ses.Get("http://httpbin.org/cookies").Execute() + if err != nil { + t.Error(err) + } + cookies := ses.GetCookies(u) + if len(cookies) != 1 && cookies[0].String() != "Request=Cookiejar" { + t.Error("cookies del get error please check it") + } + + ses.ClearCookies() + resp, err = ses.Get("http://httpbin.org/cookies").Execute() + if err != nil { + t.Error(err) + } + if gjson.Get(resp.Content(), "cookies").String() != "{}" { + t.Error(resp.Content()) + } +} + +func TestSession_SetQuery(t *testing.T) { + ses := NewSession() + ses.SetQuery(url.Values{"query": []string{"a", "b"}}) + resp, err := ses.Get("http://httpbin.org/get").Execute() + if err != nil { + t.Error(err) + } + query := gjson.Get(resp.Content(), "args.query").Array() + for _, q := range query { + if !(q.String() == "a" || q.String() == "b") { + t.Error("query error, ", resp.Content()) + } + } +} + +func TestSession_SetHeader(t *testing.T) { + ses := NewSession() + var header http.Header + header = make(http.Header) + header["xx-xx"] = []string{"Header"} + ses.SetHeader(header) + + resp, err := ses.Get("http://httpbin.org/headers").Execute() + if err != nil { + t.Error(err) + } + + if gjson.Get(resp.Content(), "headers.Xx-Xx").String() != "Header" { + t.Error("Xx-Xx is not exists", resp.Content()) + } + + if ses.GetHeader()["xx-xx"][0] != "Header" { + t.Error("header error") + } +} + +func TestSession_SetBasicAuth(t *testing.T) { + ses := NewSession() + ses.SetConfig(CBasicAuth, &BasicAuth{User: "eson", Password: "123456"}) + resp, err := ses.Get("http://httpbin.org/basic-auth/eson/123456").Execute() + if err != nil { + t.Error(err) + } + if resp.GetSrcResponse().StatusCode != 200 { + t.Error("code != 200, code = ", resp.GetStatue()) + } + + ses.SetConfig(CBasicAuth, BasicAuth{User: "eson", Password: "12345"}) + resp, err = ses.Get("http://httpbin.org/basic-auth/eson/123456").Execute() + if err != nil { + t.Error(err) + } + + if resp.GetSrcResponse().StatusCode != 401 { + t.Error("code != 401, code = ", resp.GetStatue()) + } + + resp, err = 
ses.Get("http://httpbin.org/basic-auth/eson/123456").Execute() + if err != nil { + t.Error(err) + } + + if resp.GetSrcResponse().StatusCode != 401 { + t.Error("code != 401, code = ", resp.GetStatue()) + } + + ses.SetConfig(CBasicAuth, []string{"son", "123456"}) + resp, err = ses.Get("http://httpbin.org/basic-auth/eson/123456").Execute() + if err != nil { + t.Error(err) + } + if resp.GetSrcResponse().StatusCode != 401 { + t.Error("code != 401, code = ", resp.GetStatue()) + } + + ses.SetConfig(CBasicAuth, nil) + resp, err = ses.Get("http://httpbin.org/basic-auth/eson/123456").Execute() + if err != nil { + t.Error(err) + } + if resp.GetSrcResponse().StatusCode != 401 { + t.Error("code != 401, code = ", resp.GetStatue()) + } +} diff --git a/tests/file.js b/tests/file.js new file mode 100644 index 0000000..eb18604 Binary files /dev/null and b/tests/file.js differ diff --git a/tests/json.file b/tests/json.file new file mode 100644 index 0000000..fa254a1 --- /dev/null +++ b/tests/json.file @@ -0,0 +1,2 @@ +json.file +fdsfsdavxvxwewe32323412jsonjsonjsonjson \ No newline at end of file diff --git a/tests/learn.js b/tests/learn.js new file mode 100644 index 0000000..53f60ed --- /dev/null +++ b/tests/learn.js @@ -0,0 +1,2 @@ +learn.js +fdsfsdavxlearnlearnlearnlearn \ No newline at end of file diff --git a/upload_file.go b/upload_file.go new file mode 100644 index 0000000..6690579 --- /dev/null +++ b/upload_file.go @@ -0,0 +1,101 @@ +package requests + +import ( + "io" + "log" + "os" + "path/filepath" +) + +// UploadFile 上传文件的结构 +type UploadFile struct { + FileName string + FieldName string + FileReaderCloser io.ReadCloser +} + +// SetFileName 设置FileName属性 +func (ufile *UploadFile) SetFileName(filename string) { + ufile.FileName = filename +} + +// GetFileName 设置FileName属性 +func (ufile *UploadFile) GetFileName() string { + return ufile.FileName +} + +// SetFileReaderCloser 设置FileName属性 +func (ufile *UploadFile) SetFileReaderCloser(readerCloser io.ReadCloser) { + ufile.FileReaderCloser = readerCloser +} + +// SetFileReaderCloserFromFile 设置FileName属性 +func (ufile *UploadFile) SetFileReaderCloserFromFile(filename string) error { + fd, err := os.Open(filename) + if err != nil { + return err + } + ufile.SetFileReaderCloser(fd) + return nil +} + +// GetFileReaderCloser 设置FileName属性 +func (ufile *UploadFile) GetFileReaderCloser() io.ReadCloser { + return ufile.FileReaderCloser +} + +// SetFieldName 设置FileName属性 +func (ufile *UploadFile) SetFieldName(fieldname string) { + ufile.FieldName = fieldname +} + +// GetFieldName 设置FileName属性 +func (ufile *UploadFile) GetFieldName() string { + return ufile.FieldName +} + +// NewUploadFile 创建一个空的UploadFile, 必须设置 FileName FieldName FileReaderCloser 三个属性 +func NewUploadFile() *UploadFile { + return &UploadFile{} +} + +// UploadFileFromPath 从本地文件获取上传文件 +func UploadFileFromPath(fileName string) (*UploadFile, error) { + fd, err := os.Open(fileName) + if err != nil { + return nil, err + } + + return &UploadFile{FileReaderCloser: fd, FileName: fileName}, nil +} + +// UploadFileFromGlob 根据Glob从本地文件获取上传文件 +func UploadFileFromGlob(glob string) ([]*UploadFile, error) { + files, err := filepath.Glob(glob) + + if err != nil { + return nil, err + } + + if len(files) == 0 { + log.Println("UploadFileFromGlob: len(files) == 0") + } + + var ufiles []*UploadFile + + for _, f := range files { + if s, err := os.Stat(f); err != nil || s.IsDir() { + continue + } + + fd, err := os.Open(f) + if err != nil { + log.Println(fd.Name(), err) + } else { + ufiles = append(ufiles, 
&UploadFile{FileReaderCloser: fd, FileName: filepath.Base(fd.Name())}) + } + } + + return ufiles, nil + +} diff --git a/upload_file_test.go b/upload_file_test.go new file mode 100644 index 0000000..860e9b0 --- /dev/null +++ b/upload_file_test.go @@ -0,0 +1,107 @@ +package requests + +import ( + "testing" + + "github.com/474420502/gjson" +) + +func TestUploadFile(t *testing.T) { + + for i := 0; i < 1; i++ { + + ses := NewSession() + wf := ses.Put("http://httpbin.org/put") + + ufile, err := UploadFileFromPath("tests/json.file") + if err != nil { + t.Error(err) + } + wf.SetBodyAuto(ufile, TypeFormData) + resp, _ := wf.Execute() + if _, ok := gjson.Get(resp.Content(), "files").Map()["file0"]; !ok { + t.Error("file error", resp.Content()) + } + + ses = NewSession() + wf = ses.Patch("http://httpbin.org/patch") + + wf.SetBodyAuto("tests/json.file", TypeFormData) + resp, _ = wf.Execute() + if _, ok := gjson.Get(resp.Content(), "files").Map()["file0"]; !ok { + t.Error("file error", resp.Content()) + } + + ses = NewSession() + wf = ses.Delete("http://httpbin.org/delete") + ufile = NewUploadFile() + ufile.SetFileName("MyFile") + ufile.SetFieldName("MyField") + ufile.SetFileReaderCloserFromFile("tests/json.file") + wf.SetBodyAuto(ufile) + resp, _ = wf.Execute() + if _, ok := gjson.Get(resp.Content(), "files").Map()["MyField"]; !ok { + t.Error("file error", resp.Content()) + } + + // ses = NewSession() + // wf = ses.Put("http://httpbin.org/put") + + ufile.SetFileReaderCloserFromFile("tests/json.file") + wf.SetBodyAuto(*ufile) + resp, _ = wf.Execute() + if _, ok := gjson.Get(resp.Content(), "files").Map()["MyField"]; !ok { + t.Error("file error", resp.Content()) + } + + // ses = NewSession() + // wf = ses.Put("http://httpbin.org/put") + + ufile = NewUploadFile() + ufile.SetFileName("MyFile") + ufile.SetFileReaderCloserFromFile("tests/json.file") + wf.SetBodyAuto(ufile) + resp, _ = wf.Execute() + if _, ok := gjson.Get(resp.Content(), "files").Map()["file0"]; !ok { + t.Error("file error", resp.Content()) + } + + ufile.SetFileReaderCloserFromFile("tests/json.file") + wf.SetBodyAuto(*ufile) + resp, _ = wf.Execute() + if _, ok := gjson.Get(resp.Content(), "files").Map()["file0"]; !ok { + t.Error("file error", resp.Content()) + } + + var ufileList []*UploadFile + ufile, err = UploadFileFromPath("tests/json.file") + if err != nil { + t.Error(err) + } + ufileList = append(ufileList, ufile) + ufile, err = UploadFileFromPath("tests/learn.js") + if err != nil { + t.Error(err) + } + ufileList = append(ufileList, ufile) + wf.SetBodyAuto(ufileList) + resp, _ = wf.Execute() + if _, ok := gjson.Get(resp.Content(), "files").Map()["file1"]; !ok { + t.Error("file error", resp.Content()) + } + + if wf.GetBody().ContentType() != "" { + t.Error("Body is not Clear") + } + + wf.SetBodyAuto([]string{"tests/learn.js", "tests/json.file"}, TypeFormData) + resp, _ = wf.Execute() + if _, ok := gjson.Get(resp.Content(), "files").Map()["file1_0"]; !ok { + t.Error("file error", resp.Content()) + } + if _, ok := gjson.Get(resp.Content(), "files").Map()["file0_0"]; !ok { + t.Error("file error", resp.Content()) + } + } + +} diff --git a/workflow.go b/workflow.go new file mode 100644 index 0000000..0fc0afc --- /dev/null +++ b/workflow.go @@ -0,0 +1,338 @@ +package requests + +import ( + "net/http" + "net/url" + "regexp" + "strings" +) + +// Workflow 工作流 设计点: 这个并不影响Session的属性变化 如 NewWorkflow(ses, url).AddHeader() 对ses没影响 +type Workflow struct { + session *Session + ParsedURL *url.URL + Method string + Body IBody + Header http.Header + 
Cookies map[string]*http.Cookie +} + +// NewWorkflow new and init workflow +func NewWorkflow(ses *Session, urlstr string) *Workflow { + wf := &Workflow{} + wf.SwitchSession(ses) + wf.SetRawURL(urlstr) + + wf.Body = NewBody() + wf.Header = make(http.Header) + wf.Cookies = make(map[string]*http.Cookie) + return wf +} + +// SwitchSession 替换Session +func (wf *Workflow) SwitchSession(ses *Session) { + wf.session = ses +} + +// AddHeader 添加头信息 Get方法从Header参数上获取 必须符合规范 HaHa -> Haha 如果真要HaHa,只能这样 Ha-Ha +func (wf *Workflow) AddHeader(key, value string) *Workflow { + wf.Header[key] = append(wf.Header[key], value) + return wf +} + +// SetHeader 设置完全替换原有Header 必须符合规范 HaHa -> Haha 如果真要HaHa,只能这样 Ha-Ha +func (wf *Workflow) SetHeader(header http.Header) *Workflow { + wf.Header = make(http.Header) + for k, HValues := range header { + var newHValues []string + for _, value := range HValues { + newHValues = append(newHValues, value) + } + wf.Header[k] = newHValues + } + return wf +} + +// GetHeader 获取Workflow Header +func (wf *Workflow) GetHeader() http.Header { + return wf.Header +} + +// GetCombineHeader 获取后的Header信息 +func (wf *Workflow) GetCombineHeader() http.Header { + return mergeMapList(wf.session.Header, wf.Header) +} + +// DelHeader 添加头信息 Get方法从Header参数上获取 +func (wf *Workflow) DelHeader(key string) *Workflow { + wf.Header.Del(key) + return wf +} + +// AddCookie 添加Cookie +func (wf *Workflow) AddCookie(c *http.Cookie) *Workflow { + wf.Cookies[c.Name] = c + return wf +} + +// AddCookies 添加[]*http.Cookie +func (wf *Workflow) AddCookies(cookies []*http.Cookie) *Workflow { + for _, c := range cookies { + wf.AddCookie(c) + } + return wf +} + +// AddKVCookie 添加 以 key value 的 Cookie +func (wf *Workflow) AddKVCookie(name, value string) *Workflow { + wf.Cookies[name] = &http.Cookie{Name: name, Value: value} + return wf +} + +// DelCookie 删除Cookie +func (wf *Workflow) DelCookie(name interface{}) *Workflow { + switch n := name.(type) { + case string: + if _, ok := wf.Cookies[n]; ok { + delete(wf.Cookies, n) + return wf + } + case *http.Cookie: + if _, ok := wf.Cookies[n.Name]; ok { + delete(wf.Cookies, n.Name) + return wf + } + default: + panic("name type is not support") + } + return nil +} + +// GetParsedURL 获取url的string形式 +func (wf *Workflow) GetParsedURL() *url.URL { + return wf.ParsedURL +} + +// SetParsedURL 获取url的string形式 +func (wf *Workflow) SetParsedURL(u *url.URL) *Workflow { + wf.ParsedURL = u + return wf +} + +// GetRawURL 获取url的string形式 +func (wf *Workflow) GetRawURL() string { + u := strings.Split(wf.ParsedURL.String(), "?")[0] + "?" 
+ wf.GetCombineQuery().Encode() + return u +} + +// SetRawURL 设置 url +func (wf *Workflow) SetRawURL(srcURL string) *Workflow { + purl, err := url.ParseRequestURI(srcURL) + if err != nil { + panic(err) + } + wf.ParsedURL = purl + return wf +} + +// GetQuery 获取Query参数 +func (wf *Workflow) GetQuery() url.Values { + return wf.ParsedURL.Query() +} + +// GetCombineQuery 获取Query参数 +func (wf *Workflow) GetCombineQuery() url.Values { + if wf.ParsedURL != nil { + vs := wf.ParsedURL.Query() + return mergeMapList(wf.session.GetQuery(), vs) + } + return nil +} + +// SetQuery 设置Query参数 +func (wf *Workflow) SetQuery(query url.Values) *Workflow { + if query == nil { + return wf + } + query = (url.Values)(mergeMapList(wf.session.Query, query)) + wf.ParsedURL.RawQuery = query.Encode() + return wf +} + +var regexGetPath = regexp.MustCompile("/[^/]*") + +// GetURLPath 获取Path参数 http://localhost/anything/user/pwd return [/anything /user /pwd] +func (wf *Workflow) GetURLPath() []string { + return regexGetPath.FindAllString(wf.ParsedURL.Path, -1) +} + +// GetURLRawPath 获取未分解Path参数 +func (wf *Workflow) GetURLRawPath() string { + return wf.ParsedURL.Path +} + +// encodePath path格式每个item都必须以/开头 +func encodePath(path []string) string { + rawpath := "" + for _, p := range path { + if p[0] != '/' { + p = "/" + p + } + rawpath += p + } + return rawpath +} + +// SetURLPath 设置Path参数 对应 GetURLPath +func (wf *Workflow) SetURLPath(path []string) *Workflow { + if path == nil { + return wf + } + wf.ParsedURL.Path = encodePath(path) + return wf +} + +// SetURLRawPath 设置 参数 eg. /get = http:// hostname + /get +func (wf *Workflow) SetURLRawPath(path string) *Workflow { + if path[0] != '/' { + wf.ParsedURL.Path = "/" + path + } else { + wf.ParsedURL.Path = path + } + return wf +} + +// SetBody 参数设置 +func (wf *Workflow) SetBody(body IBody) *Workflow { + wf.Body = body + return wf +} + +// GetBody 参数设置 +func (wf *Workflow) GetBody() IBody { + return wf.Body +} + +// SetBodyAuto 参数设置 +func (wf *Workflow) SetBodyAuto(params ...interface{}) *Workflow { + + if params != nil { + plen := len(params) + defaultContentType := TypeURLENCODED + + if plen >= 2 { + t := params[plen-1] + defaultContentType = t.(string) + } + + wf.Body.SetPrefix(defaultContentType) + + switch defaultContentType { + case TypeFormData: + createMultipart(wf.Body, params) // 还存在 Mixed的可能 + default: + var values url.Values + switch param := params[0].(type) { + case map[string]string: + values := make(url.Values) + for k, v := range param { + values.Set(k, v) + } + wf.Body.SetIOBody([]byte(values.Encode())) + case map[string][]string: + values = param + wf.Body.SetIOBody([]byte(values.Encode())) + case string: + wf.Body.SetIOBody([]byte(param)) + case []byte: + wf.Body.SetIOBody(param) + + case *UploadFile: + params = append(params, TypeFormData) + wf.Body.SetPrefix(TypeFormData) + createMultipart(wf.Body, params) + case UploadFile: + params = append(params, TypeFormData) + wf.Body.SetPrefix(TypeFormData) + createMultipart(wf.Body, params) + case []*UploadFile: + params = append(params, TypeFormData) + wf.Body.SetPrefix(TypeFormData) + createMultipart(wf.Body, params) + case []UploadFile: + params = append(params, TypeFormData) + wf.Body.SetPrefix(TypeFormData) + createMultipart(wf.Body, params) + } + } + + } + return wf +} + +func mergeMapList(headers ...map[string][]string) map[string][]string { + + set := make(map[string]map[string]int) + merged := make(map[string][]string) + + for _, header := range headers { + for key, values := range header { + for _, v := range 
values { + if vs, ok := set[key]; ok { + vs[v] = 1 + } else { + set[key] = make(map[string]int) + set[key][v] = 1 + } + } + } + } + + for key, mvalue := range set { + for v := range mvalue { + // merged.Add(key, v) + if mergeValue, ok := merged[key]; ok { + merged[key] = append(mergeValue, v) + } else { + merged[key] = []string{v} + } + } + } + + return merged +} + +// setHeaderRequest 设置request的头 +func setHeaderRequest(req *http.Request, wf *Workflow) { + req.Header = mergeMapList(req.Header, wf.session.Header, wf.Header) +} + +// setHeaderRequest 设置request的临时Cookie, 永久需要在session上设置cookie +func setTempCookieRequest(req *http.Request, wf *Workflow) { + if wf.Cookies != nil { + for _, c := range wf.Cookies { + req.AddCookie(c) + } + } +} + +// Execute 执行 +func (wf *Workflow) Execute() (*Response, error) { + + req := buildBodyRequest(wf) + + setHeaderRequest(req, wf) + setTempCookieRequest(req, wf) + + if wf.session.auth != nil { + req.SetBasicAuth(wf.session.auth.User, wf.session.auth.Password) + } + + resp, err := wf.session.client.Do(req) + if err != nil { + return nil, err + } + + wf.Body = NewBody() + return FromHTTPResponse(resp, wf.session.Is.isDecompressNoAccept) +} diff --git a/workflow_test.go b/workflow_test.go new file mode 100644 index 0000000..d42698a --- /dev/null +++ b/workflow_test.go @@ -0,0 +1,306 @@ +package requests + +import ( + "net/http" + "net/url" + "regexp" + "sort" + "testing" + + "github.com/474420502/gjson" +) + +func TestWorkflow(t *testing.T) { + ses := NewSession() + + t.Run("set cookie", func(t *testing.T) { + resp, err := ses.Get("http://httpbin.org/cookies/set").AddKVCookie("a", "1").Execute() + if err != nil { + t.Error("cookies set error", err) + } + + if !regexp.MustCompile(`"a": "1"`).MatchString(resp.readContent) { + t.Error(resp.readContent) + } + + wf := ses.Get("http://httpbin.org/cookies/set") + resp, err = wf.AddKVCookie("b", "2").Execute() + if err != nil { + t.Error("cookies set error", err) + } + + result := gjson.Get(resp.readContent, "cookies.a") + if result.Exists() { + t.Error(resp.readContent) + } + + result = gjson.Get(resp.readContent, "cookies.b") + if result.Int() != 2 { + t.Error(resp.readContent) + } + + resp, err = wf.AddKVCookie("a", "3").Execute() + results := gjson.GetMany(resp.readContent, "cookies.a", "cookies.b") + if results[0].Int() != 3 { + t.Error(resp.readContent) + } + + if results[1].Int() != 2 { + t.Error(resp.readContent) + } + + resp, err = wf.AddHeader("XX", "123").SetRawURL("http://httpbin.org/headers").Execute() + if err != nil { + t.Error("cookies set error", err) + } + + // headers 只能是String 表示 + result = gjson.Get(resp.readContent, "headers.Xx") + if result.String() != "123" { + t.Error(resp.readContent) + } + }) + +} + +func TestWorkflow_SetHeader(t *testing.T) { + ses := NewSession() + wf := ses.Get("http://httpbin.org/headers") + var header http.Header + header = make(http.Header) + header["Eson"] = []string{"Bad"} + header["HaHa"] = []string{"xixi"} + wf.SetHeader(header) + + resp, err := wf.Execute() + if err == nil && gjson.Get(resp.Content(), "headers.Eson").String() != "Bad" { + t.Error("wf header error", resp.Content()) + } + + if err == nil && gjson.Get(resp.Content(), "headers.Haha").String() != "xixi" { + t.Error("wf header error", resp.Content()) + } + + // 输入不符合规范不 会自动转换 + if wf.GetHeader()["HaHa"][0] != "xixi" { + t.Error("Header 错误") + } + + if len(ses.GetHeader()) != 0 { + t.Error("session header should be zero") + } + + delete(header, "HaHa") + ses.SetHeader(header) + wf = 
ses.Get("http://httpbin.org/headers") + wf.AddHeader("Hello", "Hehe") + + resp, err = wf.Execute() + if err != nil || gjson.Get(resp.Content(), "headers.Eson").String() != "Bad" { + t.Error("wf header error", resp.Content()) + } + + if err != nil || gjson.Get(resp.Content(), "headers.Hello").String() != "Hehe" { + t.Error("wf header error", resp.Content()) + } + + if len(wf.GetHeader()) != 1 || wf.GetHeader()["Hello"][0] != "Hehe" { + t.Error("session header should be 1") + } + + cheader := wf.GetCombineHeader() + if len(cheader) != 2 || cheader["Eson"][0] != "Bad" { + t.Error("GetCombineHeader error") + } + + resp, err = wf.DelHeader("Hello").Execute() + if err != nil { + t.Error(err, resp.Content()) + } + + if gjson.Get(resp.Content(), "headers.Hello").Exists() { + t.Error(" wf.DelHeader error") + } +} + +func TestWorkflow_Cookies(t *testing.T) { + ses := NewSession() + u, err := url.Parse("http://httpbin.org") + if err != nil { + t.Error(err) + } + ses.SetCookies(u, []*http.Cookie{&http.Cookie{Name: "Request", Value: "Cookiejar"}}) + wf := ses.Get("http://httpbin.org/cookies") + wf.AddCookie(&http.Cookie{Name: "eson", Value: "Bad"}) + + resp, _ := wf.Execute() + if gjson.Get(resp.Content(), "cookies.Request").String() != "Cookiejar" { + t.Error(" wf.AddCookie error") + } + + if gjson.Get(resp.Content(), "cookies.eson").String() != "Bad" { + t.Error(" wf.AddCookie error") + } + + wf.DelCookie("eson") + resp, _ = wf.Execute() + if gjson.Get(resp.Content(), "cookies.Request").String() != "Cookiejar" { + t.Error(" wf.AddCookie error") + } + if gjson.Get(resp.Content(), "cookies.eson").Exists() { + t.Error(" wf.DelCookie error") + } + + wf.AddCookies([]*http.Cookie{&http.Cookie{Name: "A", Value: "AA"}, &http.Cookie{Name: "B", Value: "BB"}}) + + resp, _ = wf.Execute() + if gjson.Get(resp.Content(), "cookies.Request").String() != "Cookiejar" { + t.Error(" wf.AddCookie error") + } + if gjson.Get(resp.Content(), "cookies.A").String() != "AA" { + t.Error(" wf.AddCookies error") + } + + if gjson.Get(resp.Content(), "cookies.B").String() != "BB" { + t.Error(" wf.AddCookies error") + } + + wf.DelCookie(&http.Cookie{Name: "A", Value: "AA"}) + resp, _ = wf.Execute() + if gjson.Get(resp.Content(), "cookies.A").Exists() { + t.Error(" wf.AddCookies error") + } + + if gjson.Get(resp.Content(), "cookies.B").String() != "BB" { + t.Error(" wf.AddCookies error") + } +} + +func TestWorkflow_URL(t *testing.T) { + ses := NewSession() + wf := ses.Get("http://httpbin.org/") + u, err := url.Parse("http://httpbin.org/get") + if err != nil { + t.Error(err) + } + wf.SetParsedURL(u) + resp, _ := wf.Execute() + if gjson.Get(resp.Content(), "url").String() != "http://httpbin.org/get" { + t.Error("SetParsedURL ", resp.Content()) + } + + if wf.GetParsedURL().String() != "http://httpbin.org/get" { + t.Error("SetParsedURL ", resp.Content()) + } + + wf = ses.Get("http://httpbin.org/") + + resp, _ = wf.SetURLRawPath("/get").Execute() + if gjson.Get(resp.Content(), "url").String() != "http://httpbin.org/get" { + t.Error("SetParsedURL ", resp.Content()) + } + + if wf.GetURLRawPath() != "/get" { + t.Error("SetParsedURL ", resp.Content()) + } + + resp, _ = wf.SetURLRawPath("anything/user/password").Execute() + if gjson.Get(resp.Content(), "url").String() != "http://httpbin.org/anything/user/password" { + t.Error("SetParsedURL ", resp.Content()) + } + paths := wf.GetURLPath() + if paths[0] != "/anything" || paths[1] != "/user" || paths[2] != "/password" { + t.Error("wf.GetURLPath()", paths) + } + + wf = 
ses.Get("http://httpbin.org/") + wf.SetURLPath(paths) + if gjson.Get(resp.Content(), "url").String() != "http://httpbin.org/anything/user/password" { + t.Error("SetParsedURL ", resp.Content()) + } +} + +func TestWorkflow_Query(t *testing.T) { + ses := NewSession() + query := make(url.Values) + query["session"] = []string{"true"} + ses.SetQuery(query) + wf := ses.Get("http://httpbin.org/get") + wfquery := make(url.Values) + wfquery["workflow"] = []string{"do", "to"} + wf.SetQuery(wfquery) + + resp, _ := wf.Execute() + result := gjson.Get(resp.Content(), "args.workflow") + + for _, r := range result.Array() { + if !(r.String() == "to" || r.String() == "do") { + t.Error("workflow SetQuery error") + } + } + + if gjson.Get(resp.Content(), "args.session").String() != "true" { + t.Error("session SetQuery error") + } + + if v, ok := wf.GetQuery()["workflow"]; ok { + sort.Slice(v, func(i, j int) bool { + if v[i] > v[j] { + return true + } + return false + }) + if !(v[0] == "to" && v[1] == "do") && len(v) != 2 { + t.Error("workflow GetQuery", v) + } + } + + if v, ok := wf.GetQuery()["session"]; ok { + if v[0] != "true" && len(v) != 1 { + t.Error("workflow error") + } + } +} + +func TestWorkflow_Body(t *testing.T) { + ses := NewSession() + wf := ses.Post("http://httpbin.org/post") + body := NewBody() + body.SetIOBody("a=1&b=2") + wf.SetBody(body) + resp, _ := wf.Execute() + form := gjson.Get(resp.Content(), "form").Map() + if v, ok := form["a"]; ok { + if v.String() != "1" { + t.Error(v) + } + } + + if v, ok := form["b"]; ok { + if v.String() != "2" { + t.Error(v) + } + } + + body.SetPrefix(TypeJSON) + body.SetIOBody(`{"a": "1", "b": "2"}`) + wf.SetBody(body) + resp, _ = wf.Execute() + json := gjson.Get(resp.Content(), "json").Map() + if v, ok := json["a"]; ok { + if v.String() != "1" { + t.Error(v) + } + } + + if v, ok := json["b"]; ok { + if v.String() != "2" { + t.Error(v) + } + } + + // body.SetPrefix(TypeXML) + // body.SetIOBody(`12`) + // wf.SetBody(body) + // resp, _ = wf.Execute() +}