Skip to content

Commit

Permalink
Merge pull request #83 from meian/74-crawler-login-test
Browse files Browse the repository at this point in the history
/login のクローラーのテストを追加
  • Loading branch information
meian authored Jul 25, 2024
2 parents 0321706 + 7dd190f commit 3161fb6
Show file tree
Hide file tree
Showing 4 changed files with 922 additions and 0 deletions.
115 changes: 115 additions & 0 deletions crawler/common_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
package crawler_test

import (
	"embed"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"path"
	"strings"
	"testing"
	"time"

	"github.com/pkg/errors"
	"github.com/stretchr/testify/require"
)

// htmlMap maps an HTML fixture file name to its content.
type htmlMap map[string]string

// Get returns the fixture content stored under key. The special key
// "no-html" yields a fixed non-HTML placeholder body so tests can
// simulate a response that is not an HTML document; unknown keys return
// the empty string.
func (m htmlMap) Get(key string) string {
	switch key {
	case "no-html":
		return "no html"
	default:
		return m[key]
	}
}

var (
	// testdata embeds the HTML fixtures under testdata/ into the test
	// binary at compile time. The //go:embed directive must stay on the
	// line directly above the variable it populates.
	//go:embed testdata
	testdata embed.FS
	// htmlmap caches the fixture map for each target directory so the
	// embedded files are read at most once per test run (see testHTMLMap).
	htmlmap = map[string]htmlMap{}
)

// testHTMLMap returns the HTML fixtures under testdata/<target> as an
// htmlMap keyed by file name, loading and caching them on first use.
// Any read failure (or an unexpected subdirectory) fails the test.
func testHTMLMap(t *testing.T, target string) htmlMap {
	if cached, ok := htmlmap[target]; ok {
		return cached
	}
	t.Helper()
	dir := path.Join("testdata", target)
	entries, err := testdata.ReadDir(dir)
	require.NoError(t, err)
	loaded := make(htmlMap, len(entries))
	for _, entry := range entries {
		require.False(t, entry.IsDir())
		content, err := testdata.ReadFile(path.Join(dir, entry.Name()))
		require.NoError(t, err)
		loaded[entry.Name()] = string(content)
	}
	htmlmap[target] = loaded
	return loaded
}

type mockRequestRoundTripper struct {
request *http.Request
}

func (m *mockRequestRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
m.request = req
return &http.Response{
StatusCode: http.StatusOK,
Body: io.NopCloser(strings.NewReader("OK")),
}, nil
}

type captureFunc func() (method string, query, body *url.Values)

func (m *mockRequestRoundTripper) lastCaputure() (string, *url.Values, *url.Values) {
q := m.request.URL.Query()
var body *url.Values
if m.request.Body != nil {
b, _ := io.ReadAll(m.request.Body)
if bt, err := url.ParseQuery(string(b)); err == nil {
body = &bt
} else {
panic(errors.Wrapf(err, "cannot parse request body: %s", string(b)))
}
}
return m.request.Method, &q, body
}

func mockRequestClient() (*http.Client, captureFunc) {
m := &mockRequestRoundTripper{}
c := &http.Client{
Transport: m,
}
return c, m.lastCaputure
}

type mockResponseRoundTripper struct {
status int
body string
timeout bool
}

func (m *mockResponseRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
var wait <-chan time.Time
if m.timeout {
wait = time.After(1 * time.Second)
} else {
wait = time.After(0 * time.Second)
}
select {
case <-wait:
return &http.Response{
StatusCode: m.status,
Body: io.NopCloser(strings.NewReader(m.body)),
}, nil
case <-req.Context().Done():
return nil, errors.New("request canceled for timeout")
}
}

func mockResponseClient(status int, body string, timeout bool) *http.Client {
return &http.Client{
Transport: &mockResponseRoundTripper{status: status, body: body, timeout: timeout},
}
}
97 changes: 97 additions & 0 deletions crawler/login_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
package crawler_test

import (
"context"
"net/http"
"net/url"
"testing"
"time"

"github.com/meian/atgo/crawler"
"github.com/meian/atgo/crawler/requests"
"github.com/stretchr/testify/assert"
)

func TestLogin_Do_Request(t *testing.T) {
req := &requests.Login{
Username: "user",
Password: "pass",
CSRFToken: "token",
Continue: "ctn",
}
want := struct {
query *url.Values
body *url.Values
}{
query: &url.Values{"continue": {"ctn"}},
body: &url.Values{"username": {"user"}, "password": {"pass"}, "csrf_token": {"token"}},
}

assert := assert.New(t)
client, cFunc := mockRequestClient()
_, _ = crawler.NewLogin(client).Do(context.Background(), req)
method, query, body := cFunc()
assert.Equal(http.MethodPost, method)
assert.Equal(want.query, query)
assert.Equal(want.body, body)
}

// TestLogin_Do_Response exercises the login crawler against canned
// responses: a successful login page, an HTTP error, a non-HTML body,
// and a server slower than the request deadline.
func TestLogin_Do_Response(t *testing.T) {
	type response struct {
		status   int
		bodyFile string
		timeout  bool
	}
	type expectation struct {
		err      bool
		loggedIn bool
	}
	tests := []struct {
		name string
		res  response
		want expectation
	}{
		{
			name: "success",
			res:  response{status: http.StatusOK, bodyFile: "logged-in.html"},
			want: expectation{loggedIn: true},
		},
		{
			name: "forbidden",
			res:  response{status: http.StatusForbidden},
			want: expectation{err: true},
		},
		{
			name: "no html",
			res:  response{status: http.StatusOK, bodyFile: "no-html"},
			want: expectation{err: false, loggedIn: false},
		},
		{
			name: "timeout",
			res:  response{timeout: true},
			want: expectation{err: true},
		},
	}
	htmls := testHTMLMap(t, "login")
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
			defer cancel()
			client := mockResponseClient(tc.res.status, htmls.Get(tc.res.bodyFile), tc.res.timeout)
			res, err := crawler.NewLogin(client).Do(ctx, &requests.Login{Username: "user", Password: "pass"})
			if tc.want.err {
				if assert.Error(t, err) {
					t.Logf("error: %v", err)
				}
				return
			}
			assert.NoError(t, err)
			if !assert.NotNil(t, res) {
				return
			}
			assert.Equal(t, tc.want.loggedIn, res.LoggedIn)
		})
	}
}
63 changes: 63 additions & 0 deletions crawler/requests/login_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
package requests_test

import (
"net/url"
"testing"

"github.com/meian/atgo/crawler/requests"
"github.com/stretchr/testify/assert"
)

// TestLogin_URLValues checks that URLValues encodes username, password,
// and csrf_token, and that Continue is deliberately excluded from the
// form values.
func TestLogin_URLValues(t *testing.T) {
	tests := []struct {
		name string
		req  requests.Login
		want url.Values
	}{
		{
			name: "user1",
			req: requests.Login{
				Username:  "user1",
				Password:  "pass1",
				CSRFToken: "token1",
				Continue:  "continue1",
			},
			want: url.Values{
				"username":   {"user1"},
				"password":   {"pass1"},
				"csrf_token": {"token1"},
			},
		},
		{
			name: "user2",
			req: requests.Login{
				Username:  "user2",
				Password:  "pass2",
				CSRFToken: "token2",
				Continue:  "continue2",
			},
			want: url.Values{
				"username":   {"user2"},
				"password":   {"pass2"},
				"csrf_token": {"token2"},
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.want, tc.req.URLValues())
		})
	}
}
Loading

0 comments on commit 3161fb6

Please sign in to comment.