[chore][pkg/stanza] Move tokenization tests into tokenizetest package
djaglowski authored Aug 22, 2023
1 parent 1bf930d commit b13cc62
Showing 5 changed files with 243 additions and 391 deletions.
1 change: 1 addition & 0 deletions pkg/stanza/operator/helper/encoding.go
@@ -20,6 +20,7 @@ type EncodingConfig struct {
 // Deprecated: [v0.84.0] Use decoder.Decoder instead
 type Decoder = decoder.Decoder

+// Deprecated: [v0.84.0] Use decoder.New instead
 var NewDecoder = decoder.New

 // Deprecated: [v0.84.0] Use decoder.LookupEncoding instead
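Because the deprecated names are plain aliases, the rename is source-compatible. A hedged sketch of the before/after spelling for callers follows; the decoder import path is assumed from the deprecation notes, not shown in this hunk, and the example package name is hypothetical.

package decodermigration // hypothetical example package, for illustration only

import (
    "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/decoder" // assumed path
    "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/operator/helper"
)

// Old spelling: still compiles, because helper.NewDecoder is now just an alias.
var viaHelper = helper.NewDecoder

// New spelling: what callers should move to before the deprecated alias is removed.
var viaDecoder = decoder.New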
54 changes: 27 additions & 27 deletions pkg/stanza/operator/input/syslog/syslog_test.go
@@ -15,10 +15,10 @@ import (
     "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/operator"
     "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/operator/input/tcp"
     "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/operator/input/udp"
-    "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/operator/internal"
     "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/operator/parser/syslog"
     "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/pipeline"
     "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/testutil"
+    "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/tokenize/tokenizetest"
 )

 var (
@@ -182,76 +182,76 @@ func NewConfigWithUDP(syslogCfg *syslog.BaseConfig) *Config {
 }

 func TestOctetFramingSplitFunc(t *testing.T) {
-    testCases := []internal.TokenizerTestCase{
+    testCases := []tokenizetest.TestCase{
         {
-            Name: "OneLogSimple",
-            Raw:  []byte(`17 my log LOGEND 123`),
-            ExpectedTokenized: []string{
+            Name:  "OneLogSimple",
+            Input: []byte(`17 my log LOGEND 123`),
+            ExpectedTokens: []string{
                 `17 my log LOGEND 123`,
             },
         },
         {
-            Name: "TwoLogsSimple",
-            Raw:  []byte(`17 my log LOGEND 12317 my log LOGEND 123`),
-            ExpectedTokenized: []string{
+            Name:  "TwoLogsSimple",
+            Input: []byte(`17 my log LOGEND 12317 my log LOGEND 123`),
+            ExpectedTokens: []string{
                 `17 my log LOGEND 123`,
                 `17 my log LOGEND 123`,
             },
         },
         {
-            Name: "NoMatches",
-            Raw:  []byte(`no matches in it`),
-            ExpectedTokenized: []string{
+            Name:  "NoMatches",
+            Input: []byte(`no matches in it`),
+            ExpectedTokens: []string{
                 `no matches in it`,
             },
         },
         {
-            Name: "NonMatchesAfter",
-            Raw:  []byte(`17 my log LOGEND 123my log LOGEND 12317 my log LOGEND 123`),
-            ExpectedTokenized: []string{
+            Name:  "NonMatchesAfter",
+            Input: []byte(`17 my log LOGEND 123my log LOGEND 12317 my log LOGEND 123`),
+            ExpectedTokens: []string{
                 `17 my log LOGEND 123`,
                 `my log LOGEND 12317 my log LOGEND 123`,
             },
         },
         {
             Name: "HugeLog100",
-            Raw: func() []byte {
-                newRaw := internal.GeneratedByteSliceOfLength(100)
+            Input: func() []byte {
+                newRaw := tokenizetest.GenerateBytes(100)
                 newRaw = append([]byte(`100 `), newRaw...)
                 return newRaw
             }(),
-            ExpectedTokenized: []string{
-                `100 ` + string(internal.GeneratedByteSliceOfLength(100)),
+            ExpectedTokens: []string{
+                `100 ` + string(tokenizetest.GenerateBytes(100)),
             },
         },
         {
             Name: "maxCapacity",
-            Raw: func() []byte {
-                newRaw := internal.GeneratedByteSliceOfLength(4091)
+            Input: func() []byte {
+                newRaw := tokenizetest.GenerateBytes(4091)
                 newRaw = append([]byte(`4091 `), newRaw...)
                 return newRaw
             }(),
-            ExpectedTokenized: []string{
-                `4091 ` + string(internal.GeneratedByteSliceOfLength(4091)),
+            ExpectedTokens: []string{
+                `4091 ` + string(tokenizetest.GenerateBytes(4091)),
             },
         },
         {
             Name: "over capacity",
-            Raw: func() []byte {
-                newRaw := internal.GeneratedByteSliceOfLength(4092)
+            Input: func() []byte {
+                newRaw := tokenizetest.GenerateBytes(4092)
                 newRaw = append([]byte(`5000 `), newRaw...)
                 return newRaw
             }(),
-            ExpectedTokenized: []string{
-                `5000 ` + string(internal.GeneratedByteSliceOfLength(4091)),
+            ExpectedTokens: []string{
+                `5000 ` + string(tokenizetest.GenerateBytes(4091)),
                 `j`,
             },
         },
     }
     for _, tc := range testCases {
         splitFunc, err := OctetMultiLineBuilder(nil)
         require.NoError(t, err)
-        t.Run(tc.Name, tc.RunFunc(splitFunc))
+        t.Run(tc.Name, tc.Run(splitFunc))
     }
 }

119 changes: 0 additions & 119 deletions pkg/stanza/operator/internal/test_common.go

This file was deleted.
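The helpers deleted here are superseded by the tokenizetest package imported in the syslog test above. Below is a minimal sketch of the surface that diff implies; the field names, Run signature, and GenerateBytes behavior are inferred from the call sites, not copied from the real package, and the real helper presumably also bounds the scanner buffer (around 4096 bytes, judging by the maxCapacity / over capacity cases), which this sketch omits.

// Hypothetical reconstruction for illustration; inferred from usage, not the actual file.
package tokenizetest

import (
    "bufio"
    "bytes"
    "testing"

    "github.com/stretchr/testify/assert"
)

// TestCase feeds Input through a bufio.SplitFunc and checks the emitted tokens.
type TestCase struct {
    Name           string
    Input          []byte
    ExpectedTokens []string
}

// Run returns a subtest body, so call sites can write t.Run(tc.Name, tc.Run(splitFunc)).
func (tc TestCase) Run(splitFunc bufio.SplitFunc) func(*testing.T) {
    return func(t *testing.T) {
        scanner := bufio.NewScanner(bytes.NewReader(tc.Input))
        scanner.Split(splitFunc)

        var tokens []string
        for scanner.Scan() {
            tokens = append(tokens, scanner.Text())
        }
        assert.Equal(t, tc.ExpectedTokens, tokens)
    }
}

// GenerateBytes returns n bytes cycling through the lowercase alphabet; a 4092-byte
// slice then ends in 'j', which matches the trailing `j` token expected above.
func GenerateBytes(n int) []byte {
    out := make([]byte, n)
    for i := range out {
        out[i] = 'a' + byte(i%26)
    }
    return out
}

With that shape, t.Run(tc.Name, tc.Run(splitFunc)) in the syslog test reads as: build one subtest per case, scan Input with the split function under test, and compare the emitted tokens against ExpectedTokens.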

