tooling: Update minimum go version to 1.22, update golangci-lint (#722)
* go: Update to go 1.22, update golangci-lint config

* lint: Address various lint issues

* chains: fix lint complaint in TestApplyWithCanceledContext

* lint: Address additional lint issues

* lint: Address additional lint issues

* tools: update golangci-lint to 1.57
tmc authored Mar 26, 2024
1 parent 3932b31 commit b6ba669
Showing 32 changed files with 69 additions and 72 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -23,7 +23,7 @@ jobs:
uses: golangci/[email protected]
with:
args: --timeout=4m
version: v1.55.1
version: v1.57.1
build-examples:
runs-on: ubuntu-latest
steps:
4 changes: 3 additions & 1 deletion .golangci.yaml
@@ -28,6 +28,8 @@ linters:
- nolintlint # see https://github.com/golangci/golangci-lint/issues/3228.
- depguard # disabling temporarily
- ireturn # disabling temporarily
- perfsprint
- musttag

linters-settings:
cyclop:
@@ -48,5 +50,5 @@ linters-settings:
- "**/*_test.go"
- "**/mock/**/*.go"
run:
skip-dirs:
exclude-dirs:
- 'exp'
2 changes: 1 addition & 1 deletion Makefile
@@ -26,7 +26,7 @@ lint-all:
lint-deps:
@command -v golangci-lint >/dev/null 2>&1 || { \
echo >&2 "golangci-lint not found. Installing..."; \
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.1; \
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.57.1; \
}

.PHONY: docs
8 changes: 6 additions & 2 deletions chains/chains_test.go
@@ -101,12 +101,16 @@ func TestApplyWithCanceledContext(t *testing.T) {
wg.Add(1)
c := NewLLMChain(&testLanguageModel{simulateWork: time.Second}, prompts.NewPromptTemplate("test", nil))

var applyErr error
go func() {
defer wg.Done()
_, err := Apply(ctx, c, inputs, maxWorkers)
require.Error(t, err)
_, applyErr = Apply(ctx, c, inputs, maxWorkers)
}()

cancelFunc()
wg.Wait()

if applyErr == nil || applyErr.Error() != "context canceled" {
t.Fatal("expected context canceled error, got:", applyErr)
}
}
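The rewrite above moves the assertion out of the goroutine: `require.Error` calls `t.FailNow`, which must only run on the goroutine executing the test (and the newer lint config flags `require` inside goroutines). Capturing the error and checking it after `wg.Wait()` keeps the failure on the test goroutine. A minimal sketch of the same pattern, with hypothetical helper names that are not from this file:

```go
package example

import (
	"errors"
	"sync"
	"testing"
)

// doWork stands in for the goroutine's real work in this sketch.
func doWork() error { return errors.New("context canceled") }

func TestWorkerError(t *testing.T) {
	var wg sync.WaitGroup
	var workerErr error

	wg.Add(1)
	go func() {
		defer wg.Done()
		// Record the error instead of asserting here; assertions that
		// call t.FailNow are only safe on the test goroutine.
		workerErr = doWork()
	}()

	wg.Wait()
	if workerErr == nil {
		t.Fatal("expected an error")
	}
}
```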
2 changes: 0 additions & 2 deletions chains/sequential_test.go
@@ -66,7 +66,6 @@ func TestSimpleSequentialErrors(t *testing.T) {
}

for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
c, err := NewSimpleSequentialChain([]Chain{tc.chain})
@@ -179,7 +178,6 @@ func TestSequentialChainErrors(t *testing.T) {
}

for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
c, err := NewSequentialChain(tc.chains, []string{"input1", "input2"}, []string{"output"}, tc.seqChainOpts...)
1 change: 0 additions & 1 deletion chains/stuff_documents_test.go
@@ -86,7 +86,6 @@ func TestStuffDocuments_joinDocs(t *testing.T) {
chain := NewStuffDocuments(&LLMChain{})

for _, tc := range testcases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
got := chain.joinDocuments(tc.docs)
4 changes: 1 addition & 3 deletions go.mod
@@ -1,8 +1,6 @@
module github.com/tmc/langchaingo

go 1.21

toolchain go1.21.4
go 1.22

require (
github.com/google/uuid v1.6.0
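The go.mod change above raises the language version to 1.22 and drops the separate `toolchain` line. Go 1.22 gives each `for` loop iteration its own loop variable, which is why the `tc := tc` / `tt := tt` pinning copies are removed throughout the test files in this commit. A small illustrative sketch (not code from this repository) of the behavior the pinning used to guard against:

```go
package main

import "fmt"

func main() {
	var funcs []func()
	for _, s := range []string{"a", "b", "c"} {
		// Before Go 1.22 this closure captured one shared loop variable
		// and the program printed "c c c"; with go 1.22 declared in
		// go.mod, each iteration has its own s and no `s := s` copy is
		// needed.
		funcs = append(funcs, func() { fmt.Println(s) })
	}
	for _, f := range funcs {
		f()
	}
}
```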
3 changes: 1 addition & 2 deletions jsonschema/json_test.go
@@ -172,7 +172,6 @@ func TestDefinition_MarshalJSON(t *testing.T) { //nolint:funlen
}

for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
wantBytes := []byte(tt.want)
@@ -184,7 +183,7 @@ func TestDefinition_MarshalJSON(t *testing.T) { //nolint:funlen
}

got := structToMap(t, tt.def)
gotPtr := structToMap(t, &tt.def)
gotPtr := structToMap(t, &tt.def) //#nosec G601 -- false positive now that we're on go 1.22+

if !reflect.DeepEqual(got, want) {
t.Errorf("MarshalJSON() got = %v, want %v", got, want)
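The `#nosec G601` annotation above refers to gosec's "implicit memory aliasing in for loop" check: taking `&tt.def` was risky before Go 1.22 because every iteration reused the same `tt`, so the pointer could end up aliasing a later element. With per-iteration loop variables the pointer is stable, hence the false-positive note. An illustrative sketch of the aliasing the rule guards against (not code from this repository):

```go
package main

import "fmt"

type item struct{ n int }

func main() {
	items := []item{{1}, {2}, {3}}
	var ptrs []*int
	for _, it := range items {
		// gosec G601 flags &it.n: prior to Go 1.22 `it` was a single
		// reused variable, so every pointer aliased the same address and
		// this printed "3 3 3"; under Go 1.22 each iteration has its own
		// `it` and the pointers stay distinct.
		ptrs = append(ptrs, &it.n)
	}
	for _, p := range ptrs {
		fmt.Println(*p)
	}
}
```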
7 changes: 2 additions & 5 deletions llms/cache/cache.go
@@ -81,19 +81,16 @@ func (c *Cacher) GenerateContent(ctx context.Context, messages []llms.MessageCon
return response, nil
}

// hashKeyForCache implements a hair-brained hashing scheme for the parameters to `GenerateContent`.
// It simply marshals all parameters as JSON and hashes the result.
// hashKeyForCache is a helper function that generates a unique key for a given
// set of messages and call options.
func hashKeyForCache(messages []llms.MessageContent, opts llms.CallOptions) (string, error) {
hash := sha256.New()
enc := json.NewEncoder(hash)

if err := enc.Encode(messages); err != nil {
return "", err
}

if err := enc.Encode(opts); err != nil {
return "", err
}

return hex.EncodeToString(hash.Sum(nil)), nil
}
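As the reworded comment above says, `hashKeyForCache` derives a cache key by streaming the JSON encoding of the messages and the resolved call options through a SHA-256 hash and returning the hex digest, so any difference in either input produces a different key. A rough standalone sketch of the same idea, with simplified types rather than the package's real ones:

```go
package example

import (
	"crypto/sha256"
	"encoding/hex"
	"encoding/json"
)

// cacheKey hashes any JSON-encodable values into a hex string by feeding
// a JSON encoder directly into a SHA-256 hash, mirroring hashKeyForCache.
func cacheKey(values ...any) (string, error) {
	h := sha256.New()
	enc := json.NewEncoder(h)
	for _, v := range values {
		if err := enc.Encode(v); err != nil {
			return "", err
		}
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}
```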
59 changes: 37 additions & 22 deletions llms/cache/cache_test.go
@@ -11,8 +11,33 @@ import (
func TestCache_hashKeyForCache(t *testing.T) {
t.Parallel()

rq := require.New(t)

cases := []struct {
name string
v1 []llms.MessageContent
v1opt []llms.CallOption
v2 []llms.MessageContent
shouldMatch bool
}{
{
name: "empty",
v1: []llms.MessageContent{},
v2: []llms.MessageContent{},
shouldMatch: true,
},
{
name: "empty vs non-empty",
v1: []llms.MessageContent{},
v2: []llms.MessageContent{{}},
shouldMatch: false,
},
{
name: "different options",
v1: []llms.MessageContent{{}},
v1opt: []llms.CallOption{llms.WithCandidateCount(1)},
v2: []llms.MessageContent{{}},
shouldMatch: false,
},
}
mustHashKeyForCache := func(messages []llms.MessageContent, options ...llms.CallOption) string {
var opts llms.CallOptions
for _, opt := range options {
@@ -26,26 +51,16 @@ func TestCache_hashKeyForCache(t *testing.T) {

return key
}

rq.Equal(
mustHashKeyForCache([]llms.MessageContent{}),
mustHashKeyForCache([]llms.MessageContent{}),
)

rq.Equal(
mustHashKeyForCache([]llms.MessageContent{}, llms.WithCandidateCount(1)),
mustHashKeyForCache([]llms.MessageContent{}, llms.WithCandidateCount(1)),
)

rq.NotEqual(
mustHashKeyForCache([]llms.MessageContent{{}}),
mustHashKeyForCache([]llms.MessageContent{}),
)

rq.NotEqual(
mustHashKeyForCache([]llms.MessageContent{}, llms.WithCandidateCount(1)),
mustHashKeyForCache([]llms.MessageContent{}),
)
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
v1hash := mustHashKeyForCache(tc.v1, tc.v1opt...)
v2hash := mustHashKeyForCache(tc.v2)
if (v1hash == v2hash) != tc.shouldMatch {
t.Fatalf("expected %v, got %v", tc.shouldMatch, v1hash == v2hash)
}
})
}
}

func TestCache_Call(t *testing.T) {
2 changes: 1 addition & 1 deletion llms/cloudflare/cloudflarellm.go
@@ -87,7 +87,7 @@ func (o *LLM) GenerateContent(ctx context.Context, messages []llms.MessageConten
// Look at all the parts in mc; expect to find a single Text part and
// any number of binary parts.
var text string
foundText := false
var foundText bool

for _, p := range mc.Parts {
switch pt := p.(type) {
2 changes: 1 addition & 1 deletion llms/cloudflare/internal/cloudflareclient/api_test.go
@@ -97,7 +97,7 @@ func TestClient_GenerateContent(t *testing.T) { // nolint:funlen
{Role: "user", Content: "userPrompt"},
},
Stream: true,
StreamingFunc: func(ctx context.Context, chunk []byte) error {
StreamingFunc: func(_ context.Context, chunk []byte) error {
if string(chunk) != `{"result": {"response": "response"}}` {
return io.EOF
}
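This hunk, like several below, renames a callback parameter that is never read to `_`, which is what the stricter unused-parameter lint in the updated golangci-lint config expects. A small sketch of the idiom (hypothetical type and function names):

```go
package example

import (
	"context"
	"strings"
)

// streamFunc mirrors the shape of the streaming callbacks in these tests.
type streamFunc func(ctx context.Context, chunk []byte) error

func collectChunks() string {
	var sb strings.Builder
	// The callback never uses its context, so the parameter is named _.
	var fn streamFunc = func(_ context.Context, chunk []byte) error {
		sb.Write(chunk)
		return nil
	}
	_ = fn(context.Background(), []byte("hello"))
	return sb.String()
}
```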
1 change: 0 additions & 1 deletion llms/ernie/internal/ernieclient/ernieclient_test.go
@@ -32,7 +32,6 @@ func TestClient_buildURL(t *testing.T) {
}

for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
c := &Client{
1 change: 0 additions & 1 deletion llms/generatecontent_test.go
@@ -28,7 +28,6 @@ func TestTextParts(t *testing.T) {
}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
if got := TextParts(tt.args.role, tt.args.parts...); !reflect.DeepEqual(got, tt.want) {
@@ -35,7 +35,6 @@ func TestRunInference(t *testing.T) {
t.Cleanup(server.Close)

for _, tc := range tests {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
client, err := New("token", "model", server.URL)
2 changes: 1 addition & 1 deletion llms/llamafile/llamafilellm_test.go
@@ -62,7 +62,7 @@ func TestWithStreaming(t *testing.T) {

var sb strings.Builder
rsp, err := llm.GenerateContent(context.Background(), content,
llms.WithStreamingFunc(func(ctx context.Context, chunk []byte) error {
llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error {
sb.Write(chunk)
return nil
}))
2 changes: 1 addition & 1 deletion llms/ollama/ollama_test.go
@@ -94,7 +94,7 @@ func TestWithStreaming(t *testing.T) {

var sb strings.Builder
rsp, err := llm.GenerateContent(context.Background(), content,
llms.WithStreamingFunc(func(ctx context.Context, chunk []byte) error {
llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error {
sb.Write(chunk)
return nil
}))
2 changes: 1 addition & 1 deletion llms/openai/internal/openaiclient/chat_test.go
@@ -21,7 +21,7 @@ func TestParseStreamingChatResponse_FinishReason(t *testing.T) {
}

req := &ChatRequest{
StreamingFunc: func(ctx context.Context, chunk []byte) error {
StreamingFunc: func(_ context.Context, _ []byte) error {
return nil
},
}
2 changes: 1 addition & 1 deletion llms/openai/multicontent_test.go
@@ -116,7 +116,7 @@ func TestWithStreaming(t *testing.T) {

var sb strings.Builder
rsp, err := llm.GenerateContent(context.Background(), content,
llms.WithStreamingFunc(func(ctx context.Context, chunk []byte) error {
llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error {
sb.Write(chunk)
return nil
}))
1 change: 0 additions & 1 deletion memory/window_buffer_test.go
@@ -185,7 +185,6 @@ func TestConversationWindowBuffer_cutMessages(t *testing.T) { // nolint:funlen
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
wb := &ConversationWindowBuffer{
1 change: 0 additions & 1 deletion outputparser/structured_test.go
@@ -54,7 +54,6 @@ func TestStructured(t *testing.T) {
}

for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
parser := NewStructured(tc.responseSchema)
1 change: 0 additions & 1 deletion prompts/few_shot_test.go
@@ -121,7 +121,6 @@ func TestFewShotPrompt_Format(t *testing.T) {
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
t.Helper()
1 change: 0 additions & 1 deletion prompts/internal/fstring/fstring_test.go
@@ -29,7 +29,6 @@ func TestFormat(t *testing.T) {
{"4", args{"a= { val }", map[string]any{"val": 1}}, "a= 1", ""},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := Format(tt.args.format, tt.args.values)
1 change: 0 additions & 1 deletion prompts/prompt_template_test.go
@@ -51,7 +51,6 @@ func TestPromptTemplateFormatPrompt(t *testing.T) {
}

for _, tc := range cases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
p := PromptTemplate{
2 changes: 0 additions & 2 deletions prompts/templates_test.go
@@ -50,7 +50,6 @@ func TestInterpolateGoTemplate(t *testing.T) {
}

for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Run("go/template", func(t *testing.T) {
t.Parallel()
@@ -85,7 +84,6 @@
}

for _, tc := range errTestCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()

1 change: 0 additions & 1 deletion schema/chat_messages_test.go
@@ -50,7 +50,6 @@ func TestGetBufferString(t *testing.T) {
}

for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
result, err := schema.GetBufferString(tc.messages, tc.humanPrefix, tc.aiPrefix)
4 changes: 0 additions & 4 deletions textsplitter/markdown_splitter_test.go
@@ -335,8 +335,6 @@ more text`,
}

for _, tc := range tt {
tc := tc // pin

t.Run(tc.name, func(t *testing.T) {
t.Parallel()

@@ -441,8 +439,6 @@ func TestMarkdownHeaderTextSplitter_SplitInline(t *testing.T) {
}

for _, tc := range tt {
tc := tc // pin

t.Run(tc.name, func(t *testing.T) {
t.Parallel()

14 changes: 7 additions & 7 deletions textsplitter/token_splitter.go
@@ -62,20 +62,20 @@ func (s TokenSplitter) SplitText(text string) ([]string, error) {

func (s TokenSplitter) splitText(text string, tk *tiktoken.Tiktoken) []string {
splits := make([]string, 0)
inputIds := tk.Encode(text, s.AllowedSpecial, s.DisallowedSpecial)
inputIDs := tk.Encode(text, s.AllowedSpecial, s.DisallowedSpecial)

startIdx := 0
curIdx := len(inputIds)
curIdx := len(inputIDs)
if startIdx+s.ChunkSize < curIdx {
curIdx = startIdx + s.ChunkSize
}
for startIdx < len(inputIds) {
chunkIds := inputIds[startIdx:curIdx]
splits = append(splits, tk.Decode(chunkIds))
for startIdx < len(inputIDs) {
chunkIDs := inputIDs[startIdx:curIdx]
splits = append(splits, tk.Decode(chunkIDs))
startIdx += s.ChunkSize - s.ChunkOverlap
curIdx = startIdx + s.ChunkSize
if curIdx > len(inputIds) {
curIdx = len(inputIds)
if curIdx > len(inputIDs) {
curIdx = len(inputIDs)
}
}
return splits
2 changes: 1 addition & 1 deletion vectorstores/pgvector/pgvector_test.go
@@ -558,7 +558,7 @@ func TestDeduplicater(t *testing.T) {
"type": "vegetable",
}},
}, vectorstores.WithDeduplicater(
func(ctx context.Context, doc schema.Document) bool {
func(_ context.Context, doc schema.Document) bool {
return doc.PageContent == "tokyo"
},
))