diff --git a/src/Common/HashTable/TimeBucketHashMap.h b/src/Common/HashTable/TimeBucketHashMap.h
index 827c396f8ef..0950980901f 100644
--- a/src/Common/HashTable/TimeBucketHashMap.h
+++ b/src/Common/HashTable/TimeBucketHashMap.h
@@ -9,15 +9,16 @@ template <
     typename Hash = DefaultHash,
     typename Grower = TimeBucketHashTableGrower<>,
     typename Allocator = HashTableAllocator,
-    template typename ImplTable = HashMapTable>
+    template typename ImplTable = HashMapTable,
+    size_t WindowOffset = 0>
 class TimeBucketHashMapTable
-    : public TimeBucketHashTable>
+    : public TimeBucketHashTable, WindowOffset>
 {
 public:
     using Impl = ImplTable;
     using LookupResult = typename Impl::LookupResult;

-    using TimeBucketHashTable>::TimeBucketHashTable;
+    using TimeBucketHashTable, WindowOffset>::TimeBucketHashTable;

     template
     void ALWAYS_INLINE forEachMapped(Func && func)
@@ -64,10 +65,11 @@ template <
     typename Key,
     typename Mapped,
     typename Hash = DefaultHash,
+    size_t WindowOffset = 0,
     typename Grower = TimeBucketHashTableGrower<>,
     typename Allocator = HashTableAllocator,
     template typename ImplTable = HashMapTable>
-using TimeBucketHashMap = TimeBucketHashMapTable, Hash, Grower, Allocator, ImplTable>;
+using TimeBucketHashMap = TimeBucketHashMapTable, Hash, Grower, Allocator, ImplTable, WindowOffset>;

 template <
     typename Key,
diff --git a/src/Common/HashTable/TimeBucketHashTable.h b/src/Common/HashTable/TimeBucketHashTable.h
index ceea0c5b01d..fdf466b12d9 100644
--- a/src/Common/HashTable/TimeBucketHashTable.h
+++ b/src/Common/HashTable/TimeBucketHashTable.h
@@ -15,13 +15,30 @@ struct TimeBucketHashTableGrower : public HashTableGrower
     void increaseSize() { this->size_degree += this->size_degree >= 15 ? 1 : 2; }
 };

+
+/**
+ * Why do we need WindowOffset, and what is it?
+ * In a query such as 'select ... from tumble(stream, 5s) group by window_start, col', if the total length of the group-by keys is fixed
+ * and col is a nullable column, the function 'packFixed' puts the KeysNullMap (which marks which key columns of the row are null) at the front of the key
+ * and puts the window time key and the other group-by keys behind it. But TimeBucketHashTable::windowKey assumes the window time key sits at the front of the key.
+ * The key layout looks like:
+ * |             key              |
+ * +-----------------+------------+
+ * | col, window time| KeysNullMap|
+ * +-----------------+------------+ low bits
+ *                   |WindowOffset|
+ *
+ * So we add a WindowOffset that gives the length of the KeysNullMap, which lets us extract the window time key correctly.
+ * Note: WindowOffset only matters in this situation (group by window_start plus other nullable key columns); in every other situation it is 0 and does not affect the result.
+*/ template < typename Key, typename Cell, typename Hash, typename Grower, typename Allocator, - typename ImplTable = HashTable> + typename ImplTable = HashTable, + size_t WindowOffset = 0> class TimeBucketHashTable : private boost::noncopyable, protected Hash /// empty base optimization { protected: @@ -49,7 +66,8 @@ class TimeBucketHashTable : private boost::noncopyable, protected Hash /// empty /// window time key is always: 4 or 8 bytes /// window time key are always lower bits of integral type of T /// key & 0xFFFF or 0xFFFFFFFF or 0xFFFFFFFFFFFFFFFF - return key & ((0xFFull << ((win_key_size - 1) << 3)) + ((1ull << ((win_key_size - 1) << 3)) - 1)); + + return (key >> (8 * WindowOffset)) & ((0xFFull << ((win_key_size - 1) << 3)) + ((1ull << ((win_key_size - 1) << 3)) - 1)); } ALWAYS_INLINE Int64 windowKey(StringRef key) diff --git a/src/DataTypes/Serializations/SerializationNullable.cpp b/src/DataTypes/Serializations/SerializationNullable.cpp index 0e261dcb6e7..6d9dfb3be28 100644 --- a/src/DataTypes/Serializations/SerializationNullable.cpp +++ b/src/DataTypes/Serializations/SerializationNullable.cpp @@ -5,6 +5,8 @@ #include #include +#include +#include #include #include #include @@ -14,6 +16,8 @@ #include #include +#include + namespace DB { @@ -622,13 +626,32 @@ void SerializationNullable::deserializeTextJSON(IColumn & column, ReadBuffer & i deserializeTextJSONImpl(column, istr, settings, nested); } -template -ReturnType SerializationNullable::deserializeTextJSONImpl(IColumn & column, ReadBuffer & istr, const FormatSettings & settings, - const SerializationPtr & nested) -{ - return safeDeserialize(column, *nested, - [&istr] { return checkStringByFirstCharacterAndAssertTheRest("null", istr); }, - [&nested, &istr, &settings] (IColumn & nested_column) { nested->deserializeTextJSON(nested_column, istr, settings); }); +template +ReturnType SerializationNullable::deserializeTextJSONImpl( + IColumn & column, ReadBuffer & istr, const FormatSettings & settings, const SerializationPtr & nested) +{ + return safeDeserialize( + column, + *nested, + [&istr, &column] { + if (column.isNullable()) + { + auto & column_nullable = dynamic_cast(column); + auto & nested_column = column_nullable.getNestedColumn(); + if (typeid(nested_column) != typeid(ColumnString) && typeid(nested_column) != typeid(ColumnFixedString)) + { + /// If the column is nullable and the value field is empty, we assume it is NULL. + /// eg. {"key": ""} + if (*istr.position() == '"' && (*(istr.position() + 1) == '"')) + { + istr.position() += 2; + return true; + } + } + } + return checkStringByFirstCharacterAndAssertTheRest("null", istr); + }, + [&nested, &istr, &settings](IColumn & nested_column) { nested->deserializeTextJSON(nested_column, istr, settings); }); } void SerializationNullable::serializeTextXML(const IColumn & column, size_t row_num, WriteBuffer & ostr, const FormatSettings & settings) const diff --git a/src/Interpreters/Streaming/Aggregator.cpp b/src/Interpreters/Streaming/Aggregator.cpp index aa227844a46..abf67616fcf 100644 --- a/src/Interpreters/Streaming/Aggregator.cpp +++ b/src/Interpreters/Streaming/Aggregator.cpp @@ -718,7 +718,7 @@ AggregatedDataVariants::Type Aggregator::chooseAggregationMethodTimeBucketTwoLev } /// Fallback case. - return AggregatedDataVariants::Type::serialized; + return AggregatedDataVariants::Type::time_bucket_serialized_two_level; } /// No key has been found to be nullable. 
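
Note: to make the shift-and-mask in TimeBucketHashTable::windowKey above concrete, here is a minimal standalone sketch. The packed key value, the 2-byte null map, the 4-byte window key and the function name are illustrative assumptions for this example, not values taken from the patch.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    /// Illustration only: extract the window time key from the low 64 bits of a packed key
    /// whose lowest WindowOffset bytes are the KeysNullMap and whose next win_key_size bytes
    /// are the window time key, mirroring (key >> (8 * WindowOffset)) & mask in the hunk above.
    template <size_t WindowOffset>
    uint64_t extractWindowKey(uint64_t packed_low_bits, size_t win_key_size)
    {
        const uint64_t mask = (0xFFull << ((win_key_size - 1) << 3)) + ((1ull << ((win_key_size - 1) << 3)) - 1);
        return (packed_low_bits >> (8 * WindowOffset)) & mask;
    }

    int main()
    {
        /// 2 null-map bytes (0x0001) in the low bits, then a 4-byte window key 0x65AB12C0, then other key data.
        const uint64_t packed = 0x112265AB12C00001ull;
        assert(extractWindowKey<2>(packed, 4) == 0x65AB12C0ull);        /// nullable keys: skip the null map first
        assert(extractWindowKey<0>(0x65AB12C0ull, 4) == 0x65AB12C0ull); /// no nullable keys: offset stays at the default 0
        return 0;
    }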
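
Note: the SerializationNullable change above makes an empty JSON string deserialize as NULL when the nested type is not a string type. Below is a rough re-statement of that decision as a self-contained predicate; it works on a plain string token rather than the real ReadBuffer/safeDeserialize machinery, and the function name is made up for the sketch.

    #include <cassert>
    #include <string_view>

    /// Sketch of the new rule: for a nullable column whose nested column is not
    /// ColumnString/ColumnFixedString, the value "" (e.g. {"key": ""}) is read as NULL;
    /// otherwise only the literal token null produces NULL, as before.
    bool readsAsNullInJSON(std::string_view value_token, bool nested_is_string_like)
    {
        if (!nested_is_string_like && value_token == "\"\"")
            return true;
        return value_token == "null";
    }

    int main()
    {
        assert(readsAsNullInJSON("\"\"", false));   /// e.g. nullable(uint64) gets NULL from ""
        assert(!readsAsNullInJSON("\"\"", true));   /// nullable(string) keeps "" as an empty string
        assert(readsAsNullInJSON("null", true));    /// an explicit null still works for any nested type
        return 0;
    }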
diff --git a/src/Interpreters/Streaming/Aggregator.h b/src/Interpreters/Streaming/Aggregator.h index 253725bfe07..3b95fcf6b93 100644 --- a/src/Interpreters/Streaming/Aggregator.h +++ b/src/Interpreters/Streaming/Aggregator.h @@ -99,6 +99,10 @@ using TimeBucketAggregatedDataWithStringKeyTwoLevel = TimeBucketHashMapWithSaved using TimeBucketAggregatedDataWithKeys128TwoLevel = TimeBucketHashMap; using TimeBucketAggregatedDataWithKeys256TwoLevel = TimeBucketHashMap; +using TimeBucketAggregatedDataWithKeys128TwoLevelNullable = TimeBucketHashMap()>; +using TimeBucketAggregatedDataWithKeys256TwoLevelNullable = TimeBucketHashMap()>; + + class Aggregator; struct AggregatedDataMetrics; @@ -205,9 +209,8 @@ SERDE struct AggregatedDataVariants : private boost::noncopyable std::unique_ptr> time_bucket_keys256_two_level; /// Nullable - std::unique_ptr> time_bucket_nullable_keys128_two_level; - std::unique_ptr> time_bucket_nullable_keys256_two_level; - + std::unique_ptr> time_bucket_nullable_keys128_two_level; + std::unique_ptr> time_bucket_nullable_keys256_two_level; /// Low cardinality // std::unique_ptr>> streaming_low_cardinality_key32_two_level; // std::unique_ptr>> streaming_low_cardinality_key64_two_level; diff --git a/tests/Queries_bugs_limitation/1352/00395_nullable.reference b/tests/queries_ported/0_stateless/00395_nullable.reference similarity index 79% rename from tests/Queries_bugs_limitation/1352/00395_nullable.reference rename to tests/queries_ported/0_stateless/00395_nullable.reference index 9e090c32605..cff7540b5a4 100644 --- a/tests/Queries_bugs_limitation/1352/00395_nullable.reference +++ b/tests/queries_ported/0_stateless/00395_nullable.reference @@ -16,18 +16,7 @@ 1 1 a \N [1] [1] ['a'] ['a'] 2000-01-01 1 \N a a [1] [1] ['a'] ['a'] 2000-01-01 ----- TinyLog engine ----- -1 1 a a [1] [1] ['a'] ['a'] 2000-01-01 -1 1 a a [1] [1] ['a'] [NULL] 2000-01-01 -1 1 a a [1] [NULL] ['a'] ['a'] 2000-01-01 -1 1 a \N [1] [1] ['a'] ['a'] 2000-01-01 -1 \N a a [1] [1] ['a'] ['a'] 2000-01-01 ------ Log engine ----- -1 1 a a [1] [1] ['a'] ['a'] 2000-01-01 -1 1 a a [1] [1] ['a'] [NULL] 2000-01-01 -1 1 a a [1] [NULL] ['a'] ['a'] 2000-01-01 -1 1 a \N [1] [1] ['a'] ['a'] 2000-01-01 -1 \N a a [1] [1] ['a'] ['a'] 2000-01-01 ------ StripeLog engine ----- +0 1 1 a a [1] [1] ['a'] ['a'] 2000-01-01 1 1 a a [1] [1] ['a'] [NULL] 2000-01-01 1 1 a a [1] [NULL] ['a'] ['a'] 2000-01-01 @@ -51,12 +40,12 @@ 58 406 ----- isNull, isNotNull ----- -2 7 0 1 -5 1 0 1 -9 \N 0 0 -42 42 0 1 -\N 6 1 1 -\N \N 1 0 +2 7 false true +5 1 false true +9 \N false false +42 42 false true +\N 6 true true +\N \N true false ----- if_null, null_if ----- 2 7 2 5 1 5 @@ -102,6 +91,7 @@ \N \N ----- if ----- +0 a 1 uint8 b 1 uint8 c 0 uint8 @@ -162,6 +152,7 @@ d 2 3 ----- variable arrays ----- +0 2 \N 2 @@ -182,6 +173,7 @@ d \N 4 \N +0 a \N a @@ -202,11 +194,13 @@ ghij \N ghij \N +0 2 3 7 4 \N +0 a bc def @@ -214,108 +208,115 @@ ghij \N ----- has ----- ----- constant arrays ----- -1 -1 -1 -1 -0 -1 -1 -1 -1 -1 -1 -1 -0 -1 -1 -1 -0 -1 -1 -0 -1 -1 -1 -0 -1 -1 -1 +true +true +true +true +false +true +true +true +true +true +true +true +false +true +true +true +false +true +true +false +true +true +true +false +true +true +true ----- variable arrays ----- -1 -0 -1 -1 -0 -1 -1 -0 -1 0 +true +false +true +true +false +true +true +false +true +false +false +true +false +true +false +false +false +false +false +false +true +true +true +false +false +true +true +true +true +true 0 -1 -0 -1 -0 +true +false +true +true +false +true +true +false +true +false +true 
+true +true +false +false +false +true +false +true +false +true +true +true +true +true 0 +true +true +true +false 0 +true +true +true +false +true 0 +true +true +true +true 0 -0 -1 -1 -1 -0 -0 -1 -1 -1 -1 -1 -1 -0 -1 -1 -0 -1 -1 -0 -1 -0 -1 -1 -1 -0 -0 -0 -1 -0 -1 -0 -1 -1 -1 -1 -1 -1 -1 -1 -0 -1 -1 -1 -0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 +true +true +true +true +true ----- Aggregation ----- +0 A 0 2 A 1 2 B 1 4 @@ -323,18 +324,22 @@ C 1 1 C \N 3 \N 1 1 \N \N 1 +0 A 0 2 A 1 2 B 1 4 C 1 1 C \N 3 +0 A 4 B 4 C 4 \N 2 +0 0 2 1 7 \N 3 +0 0 2 1 0 3 1 1 2 2 @@ -342,6 +347,7 @@ C 4 1 5 3 \N 2 1 \N 3 2 +0 0 2 1 1 0 3 \N 1 1 2 1 1 @@ -353,6 +359,7 @@ C 4 \N 2 \N 1 \N 3 1 1 \N 3 \N 1 +0 [0] 2 [1] 7 [NULL] 3 diff --git a/tests/Queries_bugs_limitation/1352/00395_nullable.sql b/tests/queries_ported/0_stateless/00395_nullable.sql similarity index 87% rename from tests/Queries_bugs_limitation/1352/00395_nullable.sql rename to tests/queries_ported/0_stateless/00395_nullable.sql index 123426d4679..36b872df316 100644 --- a/tests/Queries_bugs_limitation/1352/00395_nullable.sql +++ b/tests/queries_ported/0_stateless/00395_nullable.sql @@ -23,7 +23,6 @@ INSERT INTO test1_00395 (col1, col2, col3, col4, col5, col6, col7, col8, d) VALU INSERT INTO test1_00395 (col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', NULL, [1], [1], ['a'], ['a'], '2000-01-01'); INSERT INTO test1_00395 (col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [NULL], ['a'], ['a'], '2000-01-01'); INSERT INTO test1_00395 (col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [1], ['a'], [NULL], '2000-01-01'); -SELECT sleep(3); SELECT * FROM test1_00395 ORDER BY col1,col2,col3,col4,col5,col6,col7,col8 ASC; @@ -42,7 +41,6 @@ INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUE INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', NULL, [1], [1], ['a'], ['a'], '2000-01-01'); INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [NULL], ['a'], ['a'], '2000-01-01'); INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [1], ['a'], [NULL], '2000-01-01'); -SELECT sleep(3); SELECT * FROM test1_00395 ORDER BY col1,col2,col3,col4,col5,col6,col7,col8 ASC; SELECT '----- TinyLog engine -----'; @@ -63,40 +61,11 @@ INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUE SELECT sleep(3); SELECT * FROM test1_00395 ORDER BY col1,col2,col3,col4,col5,col6,col7,col8 ASC; -SELECT '----- Log engine -----'; - -DROP STREAM IF EXISTS test1_00395; -create stream test1_00395( -col1 uint64, col2 nullable(uint64), -col3 string, col4 nullable(string), -col5 array(uint64), col6 array(nullable(uint64)), -col7 array(string), col8 array(nullable(string)), -d date) ; - -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [1], ['a'], ['a'], '2000-01-01'); -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, NULL, 'a', 'a', [1], [1], ['a'], ['a'], '2000-01-01'); -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', NULL, [1], [1], ['a'], ['a'], '2000-01-01'); -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [NULL], ['a'], ['a'], '2000-01-01'); -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', 
[1], [1], ['a'], [NULL], '2000-01-01'); -SELECT sleep(3); -SELECT * FROM test1_00395 ORDER BY col1,col2,col3,col4,col5,col6,col7,col8 ASC; - - -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [1], ['a'], ['a'], '2000-01-01'); -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, NULL, 'a', 'a', [1], [1], ['a'], ['a'], '2000-01-01'); -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', NULL, [1], [1], ['a'], ['a'], '2000-01-01'); -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [NULL], ['a'], ['a'], '2000-01-01'); -INSERT INTO test1_00395(col1, col2, col3, col4, col5, col6, col7, col8, d) VALUES (1, 1, 'a', 'a', [1], [1], ['a'], [NULL], '2000-01-01'); -SELECT sleep(3); -SELECT * FROM test1_00395 ORDER BY col1,col2,col3,col4,col5,col6,col7,col8 ASC; - - SELECT '----- Insert with expression -----'; DROP STREAM IF EXISTS test1_00395; create stream test1_00395(col1 array(nullable(uint64))) Engine=Memory; INSERT INTO test1_00395(col1) VALUES ([1+1]); -SELECT sleep(3); SELECT col1 FROM test1_00395 ORDER BY col1 ASC; SELECT '----- Insert. Source and target columns have same types up to nullability. -----'; @@ -106,7 +75,6 @@ DROP STREAM IF EXISTS test2; create stream test2(col1 uint64, col2 nullable(uint64)) Engine=Memory; INSERT INTO test1_00395(col1,col2) VALUES (2,7)(6,9)(5,1)(4,3)(8,2); INSERT INTO test2(col1,col2) SELECT col1,col2 FROM test1_00395; -SELECT sleep(3); SELECT col1,col2 FROM test2 ORDER BY col1,col2 ASC; SELECT '----- Apply functions and aggregate functions on columns that may contain null values -----'; @@ -114,14 +82,13 @@ SELECT '----- Apply functions and aggregate functions on columns that may contai DROP STREAM IF EXISTS test1_00395; create stream test1_00395(col1 nullable(uint64), col2 nullable(uint64)) Engine=Memory; INSERT INTO test1_00395(col1,col2) VALUES (2,7)(NULL,6)(9,NULL)(NULL,NULL)(5,1)(42,42); -SELECT sleep(3); SELECT col1, col2, col1 + col2, col1 * 7 FROM test1_00395 ORDER BY col1,col2 ASC; SELECT sum(col1) FROM test1_00395; SELECT sum(col1 * 7) FROM test1_00395; SELECT '----- isNull, isNotNull -----'; -SELECT col1, col2, isNull(col1), isNotNull(col2) FROM test1_00395 ORDER BY col1,col2 ASC; +SELECT col1, col2, is_null(col1), is_not_null(col2) FROM test1_00395 ORDER BY col1,col2 ASC; SELECT '----- if_null, null_if -----'; @@ -141,7 +108,7 @@ SELECT col1, col2, coalesce(col1, col2, 99) FROM test1_00395 ORDER BY col1, col2 SELECT '----- assumeNotNull -----'; -SELECT res FROM (SELECT col1, assumeNotNull(col1) AS res FROM test1_00395) WHERE col1 IS NOT NULL ORDER BY res ASC; +SELECT res FROM (SELECT col1, assume_not_null(col1) AS res FROM test1_00395) WHERE col1 IS NOT NULL ORDER BY res ASC; SELECT '----- IS NULL, IS NOT NULL -----'; @@ -176,15 +143,13 @@ SELECT multi_if(NULL, 2, true, 3, 4); SELECT multi_if(true, 2, NULL, 3, 4); DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 nullable(int8), col2 nullable(uint16), col3 nullable(Float32)) Engine=TinyLog; +create stream test1_00395(col1 nullable(int8), col2 nullable(uint16), col3 nullable(float32)) Engine=Memory; INSERT INTO test1_00395(col1,col2,col3) VALUES (to_int8(1),to_uint16(2),to_float32(3))(NULL,to_uint16(1),to_float32(2))(to_int8(1),NULL,to_float32(2))(to_int8(1),to_uint16(2),NULL); -SELECT sleep(3); SELECT multi_if(col1 == 1, col2, col2 == 2, col3, col3 == 3, col1, 42) FROM test1_00395; DROP STREAM IF 
EXISTS test1_00395; -create stream test1_00395(cond1 nullable(uint8), then1 int8, cond2 uint8, then2 nullable(uint16), then3 nullable(Float32)) Engine=TinyLog; +create stream test1_00395(cond1 nullable(uint8), then1 int8, cond2 uint8, then2 nullable(uint16), then3 nullable(float32)) Engine=Memory; INSERT INTO test1_00395(cond1,then1,cond2,then2,then3) VALUES(1,1,1,42,99)(0,7,1,99,42)(NULL,6,2,99,NULL); -SELECT sleep(3); SELECT multi_if(cond1,then1,cond2,then2,then3) FROM test1_00395; SELECT '----- array functions -----'; @@ -214,16 +179,14 @@ SELECT array_element(['a',NULL,'c','d'], 3); SELECT array_element(['a',NULL,'c','d'], 4); DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 uint64) Engine=TinyLog; +create stream test1_00395(col1 uint64) Engine=Memory; INSERT INTO test1_00395(col1) VALUES(1),(2),(3),(4); -SELECT sleep(3); SELECT array_element([1,NULL,2,3], col1) FROM test1_00395; SELECT '----- variable arrays -----'; - DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(uint64))) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(uint64))); INSERT INTO test1_00395(col1) VALUES([2,3,7,NULL]); INSERT INTO test1_00395(col1) VALUES([NULL,3,7,4]); INSERT INTO test1_00395(col1) VALUES([2,NULL,7,NULL]); @@ -236,7 +199,7 @@ SELECT array_element(col1, 3) FROM test1_00395; SELECT array_element(col1, 4) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(string))) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(string))); INSERT INTO test1_00395(col1) VALUES(['a','bc','def',NULL]); INSERT INTO test1_00395(col1) VALUES([NULL,'bc','def','ghij']); INSERT INTO test1_00395(col1) VALUES(['a',NULL,'def',NULL]); @@ -249,7 +212,7 @@ SELECT array_element(col1, 3) FROM test1_00395; SELECT array_element(col1, 4) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(uint64)), col2 uint64) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(uint64)), col2 uint64); INSERT INTO test1_00395(col1,col2) VALUES([2,3,7,NULL], 1); INSERT INTO test1_00395(col1,col2) VALUES([NULL,3,7,4], 2); INSERT INTO test1_00395(col1,col2) VALUES([2,NULL,7,NULL], 3); @@ -259,7 +222,7 @@ SELECT sleep(3); SELECT array_element(col1,col2) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(string)), col2 uint64) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(string)), col2 uint64); INSERT INTO test1_00395(col1,col2) VALUES(['a','bc','def',NULL], 1); INSERT INTO test1_00395(col1,col2) VALUES([NULL,'bc','def','ghij'], 2); INSERT INTO test1_00395(col1,col2) VALUES(['a',NULL,'def','ghij'], 3); @@ -284,33 +247,29 @@ SELECT has(['a',NULL,'def','ghij'], 'def'); SELECT has(['a',NULL,'def','ghij'], 'ghij'); DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 uint64) Engine=TinyLog; +create stream test1_00395(col1 uint64) Engine=Memory; INSERT INTO test1_00395(col1) VALUES(1),(2),(3),(4); -SELECT sleep(3); SELECT has([1,NULL,2,3], col1) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 nullable(uint64)) Engine=TinyLog; +create stream test1_00395(col1 nullable(uint64)) Engine=Memory; INSERT INTO test1_00395(col1) VALUES(1),(2),(3),(4),(NULL); -SELECT sleep(3); SELECT has([1,NULL,2,3], col1) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 string) Engine=TinyLog; +create stream test1_00395(col1 string) Engine=Memory; INSERT 
INTO test1_00395(col1) VALUES('a'),('bc'),('def'),('ghij'); -SELECT sleep(3); SELECT has(['a',NULL,'def','ghij'], col1) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 nullable(string)) Engine=TinyLog; +create stream test1_00395(col1 nullable(string)) Engine=Memory; INSERT INTO test1_00395(col1) VALUES('a'),('bc'),('def'),('ghij'),(NULL); -SELECT sleep(3); SELECT has(['a',NULL,'def','ghij'], col1) FROM test1_00395; SELECT '----- variable arrays -----'; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(uint64))) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(uint64))); INSERT INTO test1_00395(col1) VALUES([2,3,7,NULL]); INSERT INTO test1_00395(col1) VALUES([NULL,3,7,4]); INSERT INTO test1_00395(col1) VALUES([2,NULL,7,NULL]); @@ -325,7 +284,7 @@ SELECT has(col1, 7) FROM test1_00395; SELECT has(col1, NULL) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(string))) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(string))); INSERT INTO test1_00395(col1) VALUES(['a','bc','def',NULL]); INSERT INTO test1_00395(col1) VALUES([NULL,'bc','def','ghij']); INSERT INTO test1_00395(col1) VALUES(['a',NULL,'def',NULL]); @@ -339,7 +298,7 @@ SELECT has(col1, 'ghij') FROM test1_00395; SELECT has(col1, NULL) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(uint64)), col2 uint64) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(uint64)), col2 uint64); INSERT INTO test1_00395(col1,col2) VALUES([2,3,7,NULL], 2); INSERT INTO test1_00395(col1,col2) VALUES([NULL,3,7,4], 3); INSERT INTO test1_00395(col1,col2) VALUES([2,NULL,7,NULL], 7); @@ -348,7 +307,7 @@ SELECT sleep(3); SELECT has(col1,col2) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(uint64)), col2 nullable(uint64)) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(uint64)), col2 nullable(uint64)); INSERT INTO test1_00395(col1,col2) VALUES([2,3,7,NULL], 2); INSERT INTO test1_00395(col1,col2) VALUES([NULL,3,7,4], 3); INSERT INTO test1_00395(col1,col2) VALUES([2,NULL,7,NULL], 7); @@ -358,7 +317,7 @@ SELECT sleep(3); SELECT has(col1,col2) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(string)), col2 string) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(string)), col2 string); INSERT INTO test1_00395(col1,col2) VALUES(['a','bc','def',NULL], 'a'); INSERT INTO test1_00395(col1,col2) VALUES([NULL,'bc','def','ghij'], 'bc'); INSERT INTO test1_00395(col1,col2) VALUES(['a',NULL,'def','ghij'], 'def'); @@ -367,7 +326,7 @@ SELECT sleep(3); SELECT has(col1,col2) FROM test1_00395; DROP STREAM IF EXISTS test1_00395; -create stream test1_00395(col1 array(nullable(string)), col2 nullable(string)) Engine=TinyLog; +create stream test1_00395(col1 array(nullable(string)), col2 nullable(string)); INSERT INTO test1_00395(col1,col2) VALUES(['a','bc','def',NULL], 'a'); INSERT INTO test1_00395(col1,col2) VALUES([NULL,'bc','def','ghij'], 'bc'); INSERT INTO test1_00395(col1,col2) VALUES(['a',NULL,'def','ghij'], 'def');
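
Note: the Aggregator.h hunk earlier instantiates the nullable keys128/keys256 time-bucket maps with a non-zero WindowOffset (its template arguments are elided in this rendering of the diff). The sketch below only illustrates the idea that the offset is the byte size of the packed null bitmap sitting in front of the fixed key; the helper name and the rounding rule are assumptions for illustration, not the project's real helpers.

    #include <cstddef>

    /// Assumed rule for illustration: one null bit per nullable key column, rounded up to whole
    /// bytes, with those bytes packed in front of the fixed key. That byte count is what the
    /// WindowOffset template argument would have to carry so that windowKey() can skip past it.
    constexpr size_t nullMapBytes(size_t nullable_key_columns)
    {
        return (nullable_key_columns + 7) / 8;
    }

    static_assert(nullMapBytes(1) == 1);   /// e.g. window_start plus one nullable column
    static_assert(nullMapBytes(9) == 2);   /// more nullable key columns -> wider bitmap, larger offset
    static_assert(nullMapBytes(0) == 0);   /// no nullable key columns -> offset 0, the default

    int main() { return 0; }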