From bfcb7693877bcec9df365ea5e644df9443b9016d Mon Sep 17 00:00:00 2001 From: Yahor Yuzefovich Date: Mon, 8 Jun 2020 16:50:49 -0700 Subject: [PATCH 1/2] colexec: add casts from datum-backed types to bools While investigating unrelated test failures, I added this cast, so we might as well merge it. Release note: None --- pkg/col/coldataext/datum_vec.go | 6 ++ pkg/sql/colexec/cast_test.go | 93 +++++++++++++------ pkg/sql/colexec/cast_tmpl.go | 10 +- .../colexec/execgen/cmd/execgen/cast_gen.go | 4 +- .../execgen/cmd/execgen/overloads_base.go | 6 +- .../execgen/cmd/execgen/overloads_cast.go | 60 +++++++++--- pkg/sql/colexec/utils_test.go | 49 +++++----- 7 files changed, 155 insertions(+), 73 deletions(-) diff --git a/pkg/col/coldataext/datum_vec.go b/pkg/col/coldataext/datum_vec.go index a46646a5e5a7..c6d5eac49265 100644 --- a/pkg/col/coldataext/datum_vec.go +++ b/pkg/col/coldataext/datum_vec.go @@ -70,6 +70,12 @@ func (d *Datum) CompareDatum(dVec, other interface{}) int { return d.Datum.Compare(dVec.(*datumVec).evalCtx, maybeUnwrapDatum(other)) } +// Cast returns the result of casting d to the type toType. dVec is the +// datumVec that stores d and is used to supply the eval context. +func (d *Datum) Cast(dVec interface{}, toType *types.T) (tree.Datum, error) { + return tree.PerformCast(dVec.(*datumVec).evalCtx, d.Datum, toType) +} + // Hash returns the hash of the datum as a byte slice. func (d *Datum) Hash(da *sqlbase.DatumAlloc) []byte { ed := sqlbase.EncDatum{Datum: maybeUnwrapDatum(d)} diff --git a/pkg/sql/colexec/cast_test.go b/pkg/sql/colexec/cast_test.go index 13172f645e0c..1cd164deee68 100644 --- a/pkg/sql/colexec/cast_test.go +++ b/pkg/sql/colexec/cast_test.go @@ -40,6 +40,7 @@ func TestRandomizedCast(t *testing.T) { Settings: st, }, } + rng, _ := randutil.NewPseudoRand() datumAsBool := func(d tree.Datum) interface{} { return bool(tree.MustBeDBool(d)) @@ -53,36 +54,65 @@ func TestRandomizedCast(t *testing.T) { datumAsDecimal := func(d tree.Datum) interface{} { return tree.MustBeDDecimal(d).Decimal } + datumAsColdataextDatum := func(datumVec coldata.DatumVec, d tree.Datum) interface{} { + datumVec.Set(0, d) + return datumVec.Get(0) + } + makeDatumVecAdapter := func(datumVec coldata.DatumVec) func(tree.Datum) interface{} { + return func(d tree.Datum) interface{} { + return datumAsColdataextDatum(datumVec, d) + } + } + + collatedStringType := types.MakeCollatedString(types.String, *sqlbase.RandCollationLocale(rng)) + collatedStringVec := testColumnFactory.MakeColumn(collatedStringType, 1 /* n */).(coldata.DatumVec) + getCollatedStringsThatCanBeCastAsBools := func() []tree.Datum { + var res []tree.Datum + for _, validString := range []string{"true", "false", "yes", "no"} { + d, err := tree.NewDCollatedString(validString, collatedStringType.Locale(), &tree.CollationEnvironment{}) + if err != nil { + t.Fatal(err) + } + res = append(res, d) + } + return res + } tc := []struct { fromTyp *types.T fromPhysType func(tree.Datum) interface{} toTyp *types.T toPhysType func(tree.Datum) interface{} - // Some types casting can fail, so retry if we - // generate a datum that is unable to be casted. + // getValidSet (when non-nil) is a function that returns a set of valid + // datums of fromTyp type that can be cast to toTyp type. The test + // harness will be randomly choosing a datum from this set. This + // function should be specified when sqlbase.RandDatum will take ages + // (if ever) to generate the datum that is valid for a cast. 
+ getValidSet func() []tree.Datum + // Some types casting can fail, so retry if we generate a datum that is + // unable to be cast. retryGeneration bool }{ //bool -> t tests - {types.Bool, datumAsBool, types.Bool, datumAsBool, false}, - {types.Bool, datumAsBool, types.Int, datumAsInt, false}, - {types.Bool, datumAsBool, types.Float, datumAsFloat, false}, + {fromTyp: types.Bool, fromPhysType: datumAsBool, toTyp: types.Bool, toPhysType: datumAsBool}, + {fromTyp: types.Bool, fromPhysType: datumAsBool, toTyp: types.Int, toPhysType: datumAsInt}, + {fromTyp: types.Bool, fromPhysType: datumAsBool, toTyp: types.Float, toPhysType: datumAsFloat}, // decimal -> t tests - {types.Decimal, datumAsDecimal, types.Bool, datumAsBool, false}, + {fromTyp: types.Decimal, fromPhysType: datumAsDecimal, toTyp: types.Bool, toPhysType: datumAsBool}, // int -> t tests - {types.Int, datumAsInt, types.Bool, datumAsBool, false}, - {types.Int, datumAsInt, types.Float, datumAsFloat, false}, - {types.Int, datumAsInt, types.Decimal, datumAsDecimal, false}, + {fromTyp: types.Int, fromPhysType: datumAsInt, toTyp: types.Bool, toPhysType: datumAsBool}, + {fromTyp: types.Int, fromPhysType: datumAsInt, toTyp: types.Float, toPhysType: datumAsFloat}, + {fromTyp: types.Int, fromPhysType: datumAsInt, toTyp: types.Decimal, toPhysType: datumAsDecimal}, // float -> t tests - {types.Float, datumAsFloat, types.Bool, datumAsBool, false}, + {fromTyp: types.Float, fromPhysType: datumAsFloat, toTyp: types.Bool, toPhysType: datumAsBool}, // We can sometimes generate a float outside of the range of the integers, // so we want to retry with generation if that occurs. - {types.Float, datumAsFloat, types.Int, datumAsInt, true}, - {types.Float, datumAsFloat, types.Decimal, datumAsDecimal, false}, + {fromTyp: types.Float, fromPhysType: datumAsFloat, toTyp: types.Int, toPhysType: datumAsInt, retryGeneration: true}, + {fromTyp: types.Float, fromPhysType: datumAsFloat, toTyp: types.Decimal, toPhysType: datumAsDecimal}, + // datum-backed type -> t tests + {fromTyp: collatedStringType, fromPhysType: makeDatumVecAdapter(collatedStringVec), toTyp: types.Bool, toPhysType: datumAsBool, getValidSet: getCollatedStringsThatCanBeCastAsBools}, } - rng, _ := randutil.NewPseudoRand() - for _, c := range tc { t.Run(fmt.Sprintf("%sTo%s", c.fromTyp.String(), c.toTyp.String()), func(t *testing.T) { n := 100 @@ -90,29 +120,34 @@ func TestRandomizedCast(t *testing.T) { input := tuples{} output := tuples{} for i := 0; i < n; i++ { - // We don't allow any NULL datums to be generated, so disable - // this ability in the RandDatum function. - fromDatum := sqlbase.RandDatum(rng, c.fromTyp, false) var ( - toDatum tree.Datum - err error + fromDatum, toDatum tree.Datum + err error ) - toDatum, err = tree.PerformCast(&evalCtx, fromDatum, c.toTyp) - if c.retryGeneration { - for err != nil { - // If we are allowed to retry, make a new datum and cast it on error. - fromDatum = sqlbase.RandDatum(rng, c.fromTyp, false) - toDatum, err = tree.PerformCast(&evalCtx, fromDatum, c.toTyp) - } + if c.getValidSet != nil { + validFromDatums := c.getValidSet() + fromDatum = validFromDatums[rng.Intn(len(validFromDatums))] + toDatum, err = tree.PerformCast(&evalCtx, fromDatum, c.toTyp) } else { - if err != nil { - t.Fatal(err) + // We don't allow any NULL datums to be generated, so disable + // this ability in the RandDatum function. 
+ fromDatum = sqlbase.RandDatum(rng, c.fromTyp, false) + toDatum, err = tree.PerformCast(&evalCtx, fromDatum, c.toTyp) + if c.retryGeneration { + for err != nil { + // If we are allowed to retry, make a new datum and cast it on error. + fromDatum = sqlbase.RandDatum(rng, c.fromTyp, false) + toDatum, err = tree.PerformCast(&evalCtx, fromDatum, c.toTyp) + } } } + if err != nil { + t.Fatal(err) + } input = append(input, tuple{c.fromPhysType(fromDatum)}) output = append(output, tuple{c.fromPhysType(fromDatum), c.toPhysType(toDatum)}) } - runTests(t, []tuples{input}, output, orderedVerifier, + runTestsWithTyps(t, []tuples{input}, [][]*types.T{{c.fromTyp}}, output, orderedVerifier, func(input []colexecbase.Operator) (colexecbase.Operator, error) { return createTestCastOperator(ctx, flowCtx, input[0], c.fromTyp, c.toTyp) }) diff --git a/pkg/sql/colexec/cast_tmpl.go b/pkg/sql/colexec/cast_tmpl.go index ff08c5a8ae50..37f97533bf7c 100644 --- a/pkg/sql/colexec/cast_tmpl.go +++ b/pkg/sql/colexec/cast_tmpl.go @@ -56,7 +56,7 @@ const _RIGHT_CANONICAL_TYPE_FAMILY = types.UnknownFamily // _RIGHT_TYPE_WIDTH is the template variable. const _RIGHT_TYPE_WIDTH = 0 -func _CAST(to, from interface{}) { +func _CAST(to, from, fromCol interface{}) { colexecerror.InternalError("") } @@ -104,7 +104,7 @@ func cast(inputVec, outputVec coldata.Vec, n int, sel []int) { } else { v := _L_UNSAFEGET(inputCol, i) var r _R_GO_TYPE - _CAST(r, v) + _CAST(r, v, inputCol) _R_SET(outputCol, i, r) } } @@ -116,7 +116,7 @@ func cast(inputVec, outputVec coldata.Vec, n int, sel []int) { } else { v := _L_UNSAFEGET(inputCol, i) var r _R_GO_TYPE - _CAST(r, v) + _CAST(r, v, inputCol) _R_SET(outputCol, i, r) } } @@ -127,7 +127,7 @@ func cast(inputVec, outputVec coldata.Vec, n int, sel []int) { for _, i := range sel { v := _L_UNSAFEGET(inputCol, i) var r _R_GO_TYPE - _CAST(r, v) + _CAST(r, v, inputCol) _R_SET(outputCol, i, r) } } else { @@ -135,7 +135,7 @@ func cast(inputVec, outputVec coldata.Vec, n int, sel []int) { for execgen.RANGE(i, inputCol, 0, n) { v := _L_UNSAFEGET(inputCol, i) var r _R_GO_TYPE - _CAST(r, v) + _CAST(r, v, inputCol) _R_SET(outputCol, i, r) } } diff --git a/pkg/sql/colexec/execgen/cmd/execgen/cast_gen.go b/pkg/sql/colexec/execgen/cmd/execgen/cast_gen.go index 4dc444480ed2..fa6a80ca83cc 100644 --- a/pkg/sql/colexec/execgen/cmd/execgen/cast_gen.go +++ b/pkg/sql/colexec/execgen/cmd/execgen/cast_gen.go @@ -30,8 +30,8 @@ func genCastOperators(inputFileContents string, wr io.Writer) error { ) s := r.Replace(inputFileContents) - castRe := makeFunctionRegex("_CAST", 2) - s = castRe.ReplaceAllString(s, makeTemplateFunctionCall("Right.Cast", 2)) + castRe := makeFunctionRegex("_CAST", 3) + s = castRe.ReplaceAllString(s, makeTemplateFunctionCall("Right.Cast", 3)) s = strings.ReplaceAll(s, "_L_SLICE", "execgen.SLICE") s = strings.ReplaceAll(s, "_L_UNSAFEGET", "execgen.UNSAFEGET") diff --git a/pkg/sql/colexec/execgen/cmd/execgen/overloads_base.go b/pkg/sql/colexec/execgen/cmd/execgen/overloads_base.go index 1bd4b8c47842..b05a5a4a4dcc 100644 --- a/pkg/sql/colexec/execgen/cmd/execgen/overloads_base.go +++ b/pkg/sql/colexec/execgen/cmd/execgen/overloads_base.go @@ -344,7 +344,7 @@ type twoArgsResolvedOverloadRightWidthInfo struct { type assignFunc func(op *lastArgWidthOverload, targetElem, leftElem, rightElem, targetCol, leftCol, rightCol string) string type compareFunc func(targetElem, leftElem, rightElem, leftCol, rightCol string) string -type castFunc func(to, from string) string +type castFunc func(to, from, fromCol string) string 
// Assign produces a Go source string that assigns the "targetElem" variable to // the result of applying the overload to the two inputs, "leftElem" and @@ -392,9 +392,9 @@ func (o *lastArgWidthOverload) Compare( leftElem, rightElem, targetElem, leftElem, rightElem, targetElem, targetElem) } -func (o *lastArgWidthOverload) Cast(to, from string) string { +func (o *lastArgWidthOverload) Cast(to, from, fromCol string) string { if o.CastFunc != nil { - if ret := o.CastFunc(to, from); ret != "" { + if ret := o.CastFunc(to, from, fromCol); ret != "" { return ret } } diff --git a/pkg/sql/colexec/execgen/cmd/execgen/overloads_cast.go b/pkg/sql/colexec/execgen/cmd/execgen/overloads_cast.go index f0cf1ce3b85e..ac2cbb4378ce 100644 --- a/pkg/sql/colexec/execgen/cmd/execgen/overloads_cast.go +++ b/pkg/sql/colexec/execgen/cmd/execgen/overloads_cast.go @@ -13,6 +13,7 @@ package main import ( "fmt" + "github.com/cockroachdb/cockroach/pkg/col/typeconv" "github.com/cockroachdb/cockroach/pkg/sql/colexecbase/colexecerror" "github.com/cockroachdb/cockroach/pkg/sql/types" ) @@ -56,15 +57,15 @@ func populateCastOverloads() { }, castTypeCustomizers) } -func intToDecimal(to, from string) string { +func intToDecimal(to, from, _ string) string { convStr := ` %[1]s = *apd.New(int64(%[2]s), 0) ` return fmt.Sprintf(convStr, to, from) } -func intToFloat() func(string, string) string { - return func(to, from string) string { +func intToFloat() func(string, string, string) string { + return func(to, from, _ string) string { convStr := ` %[1]s = float64(%[2]s) ` @@ -72,29 +73,29 @@ func intToFloat() func(string, string) string { } } -func intToInt16(to, from string) string { +func intToInt16(to, from, _ string) string { convStr := ` %[1]s = int16(%[2]s) ` return fmt.Sprintf(convStr, to, from) } -func intToInt32(to, from string) string { +func intToInt32(to, from, _ string) string { convStr := ` %[1]s = int32(%[2]s) ` return fmt.Sprintf(convStr, to, from) } -func intToInt64(to, from string) string { +func intToInt64(to, from, _ string) string { convStr := ` %[1]s = int64(%[2]s) ` return fmt.Sprintf(convStr, to, from) } -func floatToInt(intWidth, floatWidth int32) func(string, string) string { - return func(to, from string) string { +func floatToInt(intWidth, floatWidth int32) func(string, string, string) string { + return func(to, from, _ string) string { convStr := ` if math.IsNaN(float64(%[2]s)) || %[2]s <= float%[4]d(math.MinInt%[3]d) || %[2]s >= float%[4]d(math.MaxInt%[3]d) { colexecerror.ExpectedError(tree.ErrIntOutOfRange) @@ -108,14 +109,14 @@ func floatToInt(intWidth, floatWidth int32) func(string, string) string { } } -func numToBool(to, from string) string { +func numToBool(to, from, _ string) string { convStr := ` %[1]s = %[2]s != 0 ` return fmt.Sprintf(convStr, to, from) } -func floatToDecimal(to, from string) string { +func floatToDecimal(to, from, _ string) string { convStr := ` { var tmpDec apd.Decimal @@ -129,6 +130,19 @@ func floatToDecimal(to, from string) string { return fmt.Sprintf(convStr, to, from) } +func datumToBool(to, from, fromCol string) string { + convStr := ` + { + _castedDatum, err := %[2]s.(*coldataext.Datum).Cast(%[3]s, types.Bool) + if err != nil { + colexecerror.ExpectedError(err) + } + %[1]s = _castedDatum == tree.DBoolTrue + } + ` + return fmt.Sprintf(convStr, to, from, fromCol) +} + // castTypeCustomizer is a type customizer that changes how the templater // produces cast operator output for a particular type. 
type castTypeCustomizer interface { @@ -169,7 +183,8 @@ func registerCastTypeCustomizers() { for _, intWidth := range supportedWidthsByCanonicalTypeFamily[types.IntFamily] { registerCastTypeCustomizer(typePair{types.IntFamily, intWidth, types.IntFamily, intWidth}, intCustomizer{width: intWidth}) } - // TODO(yuzefovich): add casts for Timestamps and Intervals. + // TODO(yuzefovich): add casts for Timestamps, Intervals, and datum-backed + // types. // Casts from boolean. registerCastTypeCustomizer(typePair{types.BoolFamily, anyWidth, types.FloatFamily, anyWidth}, boolCastCustomizer{}) @@ -200,6 +215,9 @@ func registerCastTypeCustomizers() { registerCastTypeCustomizer(typePair{types.FloatFamily, anyWidth, toFamily, toWidth}, floatCastCustomizer{toFamily: toFamily, toWidth: toWidth}) } } + + // Casts from datum-backed types. + registerCastTypeCustomizer(typePair{typeconv.DatumVecCanonicalTypeFamily, anyWidth, types.BoolFamily, anyWidth}, datumCastCustomizer{toFamily: types.BoolFamily}) } // boolCastCustomizer specifies casts from booleans. @@ -220,8 +238,14 @@ type intCastCustomizer struct { toWidth int32 } +// datumCastCustomizer specifies casts from types that are backed by tree.Datum +// to other types. +type datumCastCustomizer struct { + toFamily types.Family +} + func (boolCastCustomizer) getCastFunc() castFunc { - return func(to, from string) string { + return func(to, from, _ string) string { convStr := ` %[1]s = 0 if %[2]s { @@ -233,7 +257,7 @@ func (boolCastCustomizer) getCastFunc() castFunc { } func (decimalCastCustomizer) getCastFunc() castFunc { - return func(to, from string) string { + return func(to, from, _ string) string { return fmt.Sprintf("%[1]s = %[2]s.Sign() != 0", to, from) } } @@ -274,3 +298,13 @@ func (c intCastCustomizer) getCastFunc() castFunc { // This code is unreachable, but the compiler cannot infer that. return nil } + +func (c datumCastCustomizer) getCastFunc() castFunc { + switch c.toFamily { + case types.BoolFamily: + return datumToBool + } + colexecerror.InternalError(fmt.Sprintf("unexpectedly didn't find a cast from datum-backed type to %s", c.toFamily)) + // This code is unreachable, but the compiler cannot infer that. 
+ return nil +} diff --git a/pkg/sql/colexec/utils_test.go b/pkg/sql/colexec/utils_test.go index 5a7c110b8c76..151a7cb80c2f 100644 --- a/pkg/sql/colexec/utils_test.go +++ b/pkg/sql/colexec/utils_test.go @@ -603,20 +603,25 @@ func setColVal(vec coldata.Vec, idx int, val interface{}) { vec.Decimal()[idx].Set(decimalVal) } } else if canonicalTypeFamily == typeconv.DatumVecCanonicalTypeFamily { - switch vec.Type().Family() { - case types.JsonFamily: - if jsonStr, ok := val.(string); ok { - jobj, err := json.ParseJSON(jsonStr) - if err != nil { - colexecerror.InternalError( - fmt.Sprintf("unable to parse json object: %v: %v", jobj, err)) + switch v := val.(type) { + case *coldataext.Datum: + vec.Datum().Set(idx, v) + default: + switch vec.Type().Family() { + case types.JsonFamily: + if jsonStr, ok := val.(string); ok { + jobj, err := json.ParseJSON(jsonStr) + if err != nil { + colexecerror.InternalError( + fmt.Sprintf("unable to parse json object: %v: %v", jobj, err)) + } + vec.Datum().Set(idx, &tree.DJSON{JSON: jobj}) + } else if jobj, ok := val.(json.JSON); ok { + vec.Datum().Set(idx, &tree.DJSON{JSON: jobj}) } - vec.Datum().Set(idx, &tree.DJSON{JSON: jobj}) - } else if jobj, ok := val.(json.JSON); ok { - vec.Datum().Set(idx, &tree.DJSON{JSON: jobj}) + default: + colexecerror.InternalError(fmt.Sprintf("unexpected datum-backed type: %s", vec.Type())) } - default: - colexecerror.InternalError(fmt.Sprintf("unexpected datum-backed type: %s", vec.Type())) } } else { reflect.ValueOf(vec.Col()).Index(idx).Set(reflect.ValueOf(val).Convert(reflect.TypeOf(vec.Col()).Elem())) @@ -997,16 +1002,18 @@ func getTupleFromBatch(batch coldata.Batch, tupleIdx int) tuple { newDec.Set(&colDec[tupleIdx]) val = reflect.ValueOf(newDec) } else if vec.CanonicalTypeFamily() == typeconv.DatumVecCanonicalTypeFamily { - switch vec.Type().Family() { - case types.JsonFamily: - d := vec.Datum().Get(tupleIdx).(*coldataext.Datum).Datum - if d == tree.DNull { - val = reflect.ValueOf(tree.DNull) - } else { - val = reflect.ValueOf(d.(*tree.DJSON).JSON) + d := vec.Datum().Get(tupleIdx).(*coldataext.Datum) + if d.Datum == tree.DNull { + val = reflect.ValueOf(tree.DNull) + } else { + switch vec.Type().Family() { + case types.CollatedStringFamily: + val = reflect.ValueOf(d) + case types.JsonFamily: + val = reflect.ValueOf(d.Datum.(*tree.DJSON).JSON) + default: + colexecerror.InternalError(fmt.Sprintf("unexpected datum-backed type: %s", vec.Type())) } - default: - colexecerror.InternalError(fmt.Sprintf("unexpected datum-backed type: %s", vec.Type())) } } else { val = reflect.ValueOf(vec.Col()).Index(tupleIdx) From 743af040c42b612c72addb5d96d460d7031bbbdc Mon Sep 17 00:00:00 2001 From: Yahor Yuzefovich Date: Tue, 9 Jun 2020 17:05:02 -0700 Subject: [PATCH 2/2] colexec: add support for Values core with zero rows Release note: None --- pkg/sql/colexec/bool_vec_to_sel.go | 2 +- pkg/sql/colexec/execplan.go | 22 +++++++++++++++++- pkg/sql/colexec/operator.go | 23 +++++++++++++++++-- pkg/sql/colflow/vectorized_flow_space_test.go | 4 ++-- 4 files changed, 45 insertions(+), 6 deletions(-) diff --git a/pkg/sql/colexec/bool_vec_to_sel.go b/pkg/sql/colexec/bool_vec_to_sel.go index 040a3ddc125b..50cfc1513ee4 100644 --- a/pkg/sql/colexec/bool_vec_to_sel.go +++ b/pkg/sql/colexec/bool_vec_to_sel.go @@ -31,7 +31,7 @@ func boolOrUnknownToSelOp( // If the column is of an Unknown type, then all values in that column // must be NULLs, so the selection vector will always be empty, and we // can simply plan a zero operator. 
- return NewZeroOp(input), nil + return newZeroOp(input), nil default: return nil, errors.Errorf("unexpectedly %s is neither bool nor unknown", typs[vecIdx]) } diff --git a/pkg/sql/colexec/execplan.go b/pkg/sql/colexec/execplan.go index 063baff2b567..fe6a706c0bd2 100644 --- a/pkg/sql/colexec/execplan.go +++ b/pkg/sql/colexec/execplan.go @@ -192,6 +192,12 @@ func isSupported(mode sessiondata.VectorizeExecMode, spec *execinfrapb.Processor case core.Noop != nil: return nil + case core.Values != nil: + if core.Values.NumRows != 0 { + return errors.Newf("values core only with zero rows supported") + } + return nil + case core.TableReader != nil: if core.TableReader.IsCheck { return errors.Newf("scrub table reader is unsupported in vectorized") @@ -631,6 +637,20 @@ func NewColOperator( result.Op, result.IsStreaming = NewNoop(inputs[0]), true result.ColumnTypes = make([]*types.T, len(spec.Input[0].ColumnTypes)) copy(result.ColumnTypes, spec.Input[0].ColumnTypes) + + case core.Values != nil: + if err := checkNumIn(inputs, 0); err != nil { + return result, err + } + if core.Values.NumRows != 0 { + return result, errors.AssertionFailedf("values core only with zero rows supported, %d given", core.Values.NumRows) + } + result.Op, result.IsStreaming = NewZeroOpNoInput(), true + result.ColumnTypes = make([]*types.T, len(core.Values.Columns)) + for i, col := range core.Values.Columns { + result.ColumnTypes[i] = col.Type + } + case core.TableReader != nil: if err := checkNumIn(inputs, 0); err != nil { return result, err @@ -1368,7 +1388,7 @@ func (r *postProcessResult) planFilterExpr( if expr == tree.DNull { // The filter expression is tree.DNull meaning that it is always false, so // we put a zero operator. - r.Op = NewZeroOp(r.Op) + r.Op = newZeroOp(r.Op) return nil } var filterColumnTypes []*types.T diff --git a/pkg/sql/colexec/operator.go b/pkg/sql/colexec/operator.go index c94ad8712e3f..e3781acc9df0 100644 --- a/pkg/sql/colexec/operator.go +++ b/pkg/sql/colexec/operator.go @@ -185,8 +185,8 @@ type zeroOperator struct { var _ colexecbase.Operator = &zeroOperator{} -// NewZeroOp creates a new operator which just returns an empty batch. -func NewZeroOp(input colexecbase.Operator) colexecbase.Operator { +// newZeroOp creates a new operator which just returns an empty batch. +func newZeroOp(input colexecbase.Operator) colexecbase.Operator { return &zeroOperator{OneInputNode: NewOneInputNode(input)} } @@ -198,6 +198,25 @@ func (s *zeroOperator) Next(ctx context.Context) coldata.Batch { return coldata.ZeroBatch } +type zeroOperatorNoInput struct { + colexecbase.ZeroInputNode + NonExplainable +} + +var _ colexecbase.Operator = &zeroOperatorNoInput{} + +// NewZeroOpNoInput creates a new operator which just returns an empty batch +// and doesn't an input. 
+func NewZeroOpNoInput() colexecbase.Operator { + return &zeroOperatorNoInput{} +} + +func (s *zeroOperatorNoInput) Init() {} + +func (s *zeroOperatorNoInput) Next(ctx context.Context) coldata.Batch { + return coldata.ZeroBatch +} + type singleTupleNoInputOperator struct { colexecbase.ZeroInputNode NonExplainable diff --git a/pkg/sql/colflow/vectorized_flow_space_test.go b/pkg/sql/colflow/vectorized_flow_space_test.go index 409d9c44b6cf..d759296ab62c 100644 --- a/pkg/sql/colflow/vectorized_flow_space_test.go +++ b/pkg/sql/colflow/vectorized_flow_space_test.go @@ -82,9 +82,9 @@ func TestVectorizeInternalMemorySpaceError(t *testing.T) { for _, tc := range testCases { for _, success := range []bool{true, false} { t.Run(fmt.Sprintf("%s-success-expected-%t", tc.desc, success), func(t *testing.T) { - inputs := []colexecbase.Operator{colexec.NewZeroOp(nil)} + inputs := []colexecbase.Operator{colexec.NewZeroOpNoInput()} if len(tc.spec.Input) > 1 { - inputs = append(inputs, colexec.NewZeroOp(nil)) + inputs = append(inputs, colexec.NewZeroOpNoInput()) } memMon := mon.MakeMonitor("MemoryMonitor", mon.MemoryResource, nil, nil, 0, math.MaxInt64, st) if success {
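
For reference, the datum-backed-to-bool cast in the first commit is assembled by execgen from the _CAST(r, v, inputCol) call sites in cast_tmpl.go and the datumToBool snippet in overloads_cast.go. The sketch below hand-expands that combination to show the shape of the resulting kernel; the package, function name, and parameters are illustrative only (they do not appear in the patch), and null handling and selection vectors are omitted.

package castsketch

import (
	"github.com/cockroachdb/cockroach/pkg/col/coldata"
	"github.com/cockroachdb/cockroach/pkg/col/coldataext"
	"github.com/cockroachdb/cockroach/pkg/sql/colexecbase/colexecerror"
	"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
	"github.com/cockroachdb/cockroach/pkg/sql/types"
)

// castDatumVecToBool is a hand-expanded, illustrative version of the cast
// kernel for a datum-backed input and a bool output: _CAST(r, v, inputCol)
// becomes the datumToBool block, which calls coldataext.Datum.Cast (added in
// this patch) and compares the result against tree.DBoolTrue.
func castDatumVecToBool(inputCol coldata.DatumVec, outputCol []bool, n int) {
	for i := 0; i < n; i++ {
		v := inputCol.Get(i)
		var r bool
		{
			_castedDatum, err := v.(*coldataext.Datum).Cast(inputCol, types.Bool)
			if err != nil {
				// A cast of user data that fails is an expected error.
				colexecerror.ExpectedError(err)
			}
			r = _castedDatum == tree.DBoolTrue
		}
		outputCol[i] = r
	}
}

Note that the expansion reports failures with colexecerror.ExpectedError rather than InternalError, since user data can legitimately fail to cast (e.g. a collated string that is not a valid bool representation), which is why the test's getValidSet restricts generated inputs to castable values such as "true", "false", "yes", and "no".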